Index: third_party/libcxx/include/atomic |
=================================================================== |
--- third_party/libcxx/include/atomic (revision 0) |
+++ third_party/libcxx/include/atomic (revision 0) |
@@ -0,0 +1,1531 @@ |
+// -*- C++ -*- |
+//===--------------------------- atomic -----------------------------------===// |
+// |
+// The LLVM Compiler Infrastructure |
+// |
+// This file is distributed under the University of Illinois Open Source |
+// License. See LICENSE.TXT for details. |
+// |
+//===----------------------------------------------------------------------===// |
+ |
+#ifndef _LIBCPP_ATOMIC |
+#define _LIBCPP_ATOMIC |
+ |
+/* |
+ atomic synopsis |
+ |
+namespace std |
+{ |
+ |
+// order and consistency |
+ |
+typedef enum memory_order |
+{ |
+ memory_order_relaxed, |
+ memory_order_consume, // load-consume |
+ memory_order_acquire, // load-acquire |
+ memory_order_release, // store-release |
+ memory_order_acq_rel, // store-release load-acquire |
+ memory_order_seq_cst // store-release load-acquire |
+} memory_order; |
+ |
+template <class T> T kill_dependency(T y) noexcept; |
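+
+// Example (illustrative, not part of the synopsis): a writer publishes data with
+// a store-release and a reader synchronizes with a load-acquire; data and ready
+// are assumed to be shared between the two threads.
+//
+//     std::atomic<int>  data(0);
+//     std::atomic<bool> ready(false);
+//
+//     // writer thread
+//     data.store(42, std::memory_order_relaxed);
+//     ready.store(true, std::memory_order_release);   // publish
+//
+//     // reader thread
+//     while (!ready.load(std::memory_order_acquire))  // synchronizes with the release
+//         ;
+//     int v = data.load(std::memory_order_relaxed);   // guaranteed to read 42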
+ |
+// lock-free property |
+ |
+#define ATOMIC_BOOL_LOCK_FREE unspecified |
+#define ATOMIC_CHAR_LOCK_FREE unspecified |
+#define ATOMIC_CHAR16_T_LOCK_FREE unspecified |
+#define ATOMIC_CHAR32_T_LOCK_FREE unspecified |
+#define ATOMIC_WCHAR_T_LOCK_FREE unspecified |
+#define ATOMIC_SHORT_LOCK_FREE unspecified |
+#define ATOMIC_INT_LOCK_FREE unspecified |
+#define ATOMIC_LONG_LOCK_FREE unspecified |
+#define ATOMIC_LLONG_LOCK_FREE unspecified |
+#define ATOMIC_POINTER_LOCK_FREE unspecified |
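+
+// Example (illustrative): each macro expands to 0 (never lock-free), 1 (sometimes
+// lock-free) or 2 (always lock-free); is_lock_free() answers for a concrete object.
+//
+//     static_assert(ATOMIC_INT_LOCK_FREE == 2, "expecting lock-free int atomics");
+//     std::atomic<int> n(0);
+//     bool lock_free = n.is_lock_free();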
+ |
+// flag type and operations |
+ |
+typedef struct atomic_flag |
+{ |
+ bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool test_and_set(memory_order m = memory_order_seq_cst) noexcept; |
+ void clear(memory_order m = memory_order_seq_cst) volatile noexcept; |
+ void clear(memory_order m = memory_order_seq_cst) noexcept; |
+ atomic_flag() noexcept = default; |
+ atomic_flag(const atomic_flag&) = delete; |
+ atomic_flag& operator=(const atomic_flag&) = delete; |
+ atomic_flag& operator=(const atomic_flag&) volatile = delete; |
+} atomic_flag; |
+ |
+bool |
+ atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept; |
+ |
+bool |
+ atomic_flag_test_and_set(atomic_flag* obj) noexcept; |
+ |
+bool |
+ atomic_flag_test_and_set_explicit(volatile atomic_flag* obj, |
+ memory_order m) noexcept; |
+ |
+bool |
+ atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept; |
+ |
+void |
+ atomic_flag_clear(volatile atomic_flag* obj) noexcept; |
+ |
+void |
+ atomic_flag_clear(atomic_flag* obj) noexcept; |
+ |
+void |
+ atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept; |
+ |
+void |
+ atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept; |
+ |
+#define ATOMIC_FLAG_INIT see below |
+#define ATOMIC_VAR_INIT(value) see below |
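+
+// Example (illustrative): ATOMIC_FLAG_INIT and ATOMIC_VAR_INIT initialize the flag
+// and an atomic object; atomic_flag is enough to build a minimal spin lock.
+//
+//     std::atomic_flag lock = ATOMIC_FLAG_INIT;
+//     std::atomic<int> shared = ATOMIC_VAR_INIT(0);
+//
+//     void locked_update()
+//     {
+//         while (lock.test_and_set(std::memory_order_acquire))   // spin until acquired
+//             ;
+//         // ... update shared state ...
+//         lock.clear(std::memory_order_release);                 // release the lock
+//     }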
+ |
+template <class T> |
+struct atomic |
+{ |
+ bool is_lock_free() const volatile noexcept; |
+ bool is_lock_free() const noexcept; |
+ void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ void store(T desr, memory_order m = memory_order_seq_cst) noexcept; |
+ T load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
+ T load(memory_order m = memory_order_seq_cst) const noexcept; |
+ operator T() const volatile noexcept; |
+ operator T() const noexcept; |
+ T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_weak(T& expc, T desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_strong(T& expc, T desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_strong(T& expc, T desr, |
+ memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_weak(T& expc, T desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_weak(T& expc, T desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_strong(T& expc, T desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_strong(T& expc, T desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ |
+ atomic() noexcept = default; |
+ constexpr atomic(T desr) noexcept; |
+ atomic(const atomic&) = delete; |
+ atomic& operator=(const atomic&) = delete; |
+ atomic& operator=(const atomic&) volatile = delete; |
+ T operator=(T) volatile noexcept; |
+ T operator=(T) noexcept; |
+}; |
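+
+// Example (illustrative): the primary template works for any trivially copyable T;
+// compare_exchange_weak refreshes the expected value on failure, so a retry loop
+// needs no explicit reload.
+//
+//     struct Point { int x; int y; };
+//     std::atomic<Point> p(Point{0, 0});
+//
+//     Point expected = p.load();
+//     Point desired;
+//     do {
+//         desired = Point{expected.x + 1, expected.y};
+//     } while (!p.compare_exchange_weak(expected, desired));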
+ |
+template <> |
+struct atomic<integral> |
+{ |
+ bool is_lock_free() const volatile noexcept; |
+ bool is_lock_free() const noexcept; |
+ void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ void store(integral desr, memory_order m = memory_order_seq_cst) noexcept; |
+ integral load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
+ integral load(memory_order m = memory_order_seq_cst) const noexcept; |
+ operator integral() const volatile noexcept; |
+ operator integral() const noexcept; |
+ integral exchange(integral desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_weak(integral& expc, integral desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_weak(integral& expc, integral desr, |
+ memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_strong(integral& expc, integral desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_strong(integral& expc, integral desr, |
+ memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_weak(integral& expc, integral desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_weak(integral& expc, integral desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_strong(integral& expc, integral desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_strong(integral& expc, integral desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ |
+ integral |
+ fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept; |
+ integral |
+ fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept; |
+ integral |
+ fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept; |
+ integral |
+ fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept; |
+ integral |
+ fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept; |
+ |
+ atomic() noexcept = default; |
+ constexpr atomic(integral desr) noexcept; |
+ atomic(const atomic&) = delete; |
+ atomic& operator=(const atomic&) = delete; |
+ atomic& operator=(const atomic&) volatile = delete; |
+ integral operator=(integral desr) volatile noexcept; |
+ integral operator=(integral desr) noexcept; |
+ |
+ integral operator++(int) volatile noexcept; |
+ integral operator++(int) noexcept; |
+ integral operator--(int) volatile noexcept; |
+ integral operator--(int) noexcept; |
+ integral operator++() volatile noexcept; |
+ integral operator++() noexcept; |
+ integral operator--() volatile noexcept; |
+ integral operator--() noexcept; |
+ integral operator+=(integral op) volatile noexcept; |
+ integral operator+=(integral op) noexcept; |
+ integral operator-=(integral op) volatile noexcept; |
+ integral operator-=(integral op) noexcept; |
+ integral operator&=(integral op) volatile noexcept; |
+ integral operator&=(integral op) noexcept; |
+ integral operator|=(integral op) volatile noexcept; |
+ integral operator|=(integral op) noexcept; |
+ integral operator^=(integral op) volatile noexcept; |
+ integral operator^=(integral op) noexcept; |
+}; |
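+
+// Example (illustrative): the integral specialization adds the fetch-and-modify
+// operations and the usual arithmetic operators.
+//
+//     std::atomic<unsigned> hits(0);
+//
+//     // in any number of threads
+//     hits.fetch_add(1, std::memory_order_relaxed);   // or simply ++hits
+//
+//     // after the threads are joined
+//     unsigned total = hits.load();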
+ |
+template <class T> |
+struct atomic<T*> |
+{ |
+ bool is_lock_free() const volatile noexcept; |
+ bool is_lock_free() const noexcept; |
+ void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ void store(T* desr, memory_order m = memory_order_seq_cst) noexcept; |
+ T* load(memory_order m = memory_order_seq_cst) const volatile noexcept; |
+ T* load(memory_order m = memory_order_seq_cst) const noexcept; |
+ operator T*() const volatile noexcept; |
+ operator T*() const noexcept; |
+ T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_weak(T*& expc, T* desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_weak(T*& expc, T* desr, |
+ memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_strong(T*& expc, T* desr, |
+ memory_order s, memory_order f) volatile noexcept; |
+ bool compare_exchange_strong(T*& expc, T* desr, |
+ memory_order s, memory_order f) noexcept; |
+ bool compare_exchange_weak(T*& expc, T* desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_weak(T*& expc, T* desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ bool compare_exchange_strong(T*& expc, T* desr, |
+ memory_order m = memory_order_seq_cst) volatile noexcept; |
+ bool compare_exchange_strong(T*& expc, T* desr, |
+ memory_order m = memory_order_seq_cst) noexcept; |
+ T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; |
+ T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; |
+ T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; |
+ |
+ atomic() noexcept = default; |
+ constexpr atomic(T* desr) noexcept; |
+ atomic(const atomic&) = delete; |
+ atomic& operator=(const atomic&) = delete; |
+ atomic& operator=(const atomic&) volatile = delete; |
+ |
+ T* operator=(T*) volatile noexcept; |
+ T* operator=(T*) noexcept; |
+ T* operator++(int) volatile noexcept; |
+ T* operator++(int) noexcept; |
+ T* operator--(int) volatile noexcept; |
+ T* operator--(int) noexcept; |
+ T* operator++() volatile noexcept; |
+ T* operator++() noexcept; |
+ T* operator--() volatile noexcept; |
+ T* operator--() noexcept; |
+ T* operator+=(ptrdiff_t op) volatile noexcept; |
+ T* operator+=(ptrdiff_t op) noexcept; |
+ T* operator-=(ptrdiff_t op) volatile noexcept; |
+ T* operator-=(ptrdiff_t op) noexcept; |
+}; |
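+
+// Example (illustrative): atomic<T*> with compare_exchange_weak is the core of a
+// Treiber-style lock-free push (pop is omitted; safe reclamation needs more work).
+//
+//     struct Node { int value; Node* next; };
+//     std::atomic<Node*> head(nullptr);
+//
+//     void push(int v)
+//     {
+//         Node* n = new Node{v, head.load(std::memory_order_relaxed)};
+//         while (!head.compare_exchange_weak(n->next, n,
+//                                            std::memory_order_release,
+//                                            std::memory_order_relaxed))
+//             ;   // on failure n->next is updated to the current head
+//     }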
+ |
+ |
+template <class T> |
+ bool |
+ atomic_is_lock_free(const volatile atomic<T>* obj) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_is_lock_free(const atomic<T>* obj) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_init(volatile atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_init(atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_store(volatile atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_store(atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept; |
+ |
+template <class T> |
+ void |
+ atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_load(const volatile atomic<T>* obj) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_load(const atomic<T>* obj) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_exchange(volatile atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_exchange(atomic<T>* obj, T desr) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept; |
+ |
+template <class T> |
+ T |
+ atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc, |
+ T desr, |
+ memory_order s, memory_order f) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr, |
+ memory_order s, memory_order f) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj, |
+ T* expc, T desr, |
+ memory_order s, memory_order f) noexcept; |
+ |
+template <class T> |
+ bool |
+ atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc, |
+ T desr, |
+ memory_order s, memory_order f) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept; |
+ |
+template <class Integral> |
+ Integral |
+ atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+template <class Integral> |
+ Integral |
+ atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op, |
+ memory_order m) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op, |
+ memory_order m) noexcept; |
+template <class T> |
+ T* |
+ atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept; |
+ |
+template <class T> |
+ T* |
+ atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op, |
+ memory_order m) noexcept; |
+template <class T> |
+ T* |
+ atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept; |
+ |
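+// Example (illustrative): the free functions mirror the member functions and keep a
+// spelling closer to C's <stdatomic.h> for code shared between the two languages.
+//
+//     std::atomic<int> n(0);
+//     std::atomic_store(&n, 5);
+//     int expected = 5;
+//     bool ok = std::atomic_compare_exchange_strong(&n, &expected, 7);
+//     int v = std::atomic_load(&n);   // 7 when ok is true
+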
+// Atomics for standard typedef types |
+ |
+typedef atomic<bool> atomic_bool; |
+typedef atomic<char> atomic_char; |
+typedef atomic<signed char> atomic_schar; |
+typedef atomic<unsigned char> atomic_uchar; |
+typedef atomic<short> atomic_short; |
+typedef atomic<unsigned short> atomic_ushort; |
+typedef atomic<int> atomic_int; |
+typedef atomic<unsigned int> atomic_uint; |
+typedef atomic<long> atomic_long; |
+typedef atomic<unsigned long> atomic_ulong; |
+typedef atomic<long long> atomic_llong; |
+typedef atomic<unsigned long long> atomic_ullong; |
+typedef atomic<char16_t> atomic_char16_t; |
+typedef atomic<char32_t> atomic_char32_t; |
+typedef atomic<wchar_t> atomic_wchar_t; |
+ |
+typedef atomic<int_least8_t> atomic_int_least8_t; |
+typedef atomic<uint_least8_t> atomic_uint_least8_t; |
+typedef atomic<int_least16_t> atomic_int_least16_t; |
+typedef atomic<uint_least16_t> atomic_uint_least16_t; |
+typedef atomic<int_least32_t> atomic_int_least32_t; |
+typedef atomic<uint_least32_t> atomic_uint_least32_t; |
+typedef atomic<int_least64_t> atomic_int_least64_t; |
+typedef atomic<uint_least64_t> atomic_uint_least64_t; |
+ |
+typedef atomic<int_fast8_t> atomic_int_fast8_t; |
+typedef atomic<uint_fast8_t> atomic_uint_fast8_t; |
+typedef atomic<int_fast16_t> atomic_int_fast16_t; |
+typedef atomic<uint_fast16_t> atomic_uint_fast16_t; |
+typedef atomic<int_fast32_t> atomic_int_fast32_t; |
+typedef atomic<uint_fast32_t> atomic_uint_fast32_t; |
+typedef atomic<int_fast64_t> atomic_int_fast64_t; |
+typedef atomic<uint_fast64_t> atomic_uint_fast64_t; |
+ |
+typedef atomic<intptr_t> atomic_intptr_t; |
+typedef atomic<uintptr_t> atomic_uintptr_t; |
+typedef atomic<size_t> atomic_size_t; |
+typedef atomic<ptrdiff_t> atomic_ptrdiff_t; |
+typedef atomic<intmax_t> atomic_intmax_t; |
+typedef atomic<uintmax_t> atomic_uintmax_t; |
+ |
+// fences |
+ |
+void atomic_thread_fence(memory_order m) noexcept; |
+void atomic_signal_fence(memory_order m) noexcept; |
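+
+// Example (illustrative): fences can stand in for per-operation release/acquire,
+// here with data and ready as in the memory_order example above.
+//
+//     // writer thread
+//     data.store(42, std::memory_order_relaxed);
+//     std::atomic_thread_fence(std::memory_order_release);
+//     ready.store(true, std::memory_order_relaxed);
+//
+//     // reader thread
+//     while (!ready.load(std::memory_order_relaxed))
+//         ;
+//     std::atomic_thread_fence(std::memory_order_acquire);
+//     int v = data.load(std::memory_order_relaxed);   // guaranteed to read 42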
+ |
+} // std |
+ |
+*/ |
+ |
+#include <__config> |
+#include <cstddef> |
+#include <cstdint> |
+#include <type_traits> |
+ |
+#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) |
+#pragma GCC system_header |
+#endif |
+ |
+_LIBCPP_BEGIN_NAMESPACE_STD |
+ |
+#if !__has_feature(cxx_atomic) |
+#error <atomic> is not implemented |
+#else |
+ |
+typedef enum memory_order |
+{ |
+ memory_order_relaxed, memory_order_consume, memory_order_acquire, |
+ memory_order_release, memory_order_acq_rel, memory_order_seq_cst |
+} memory_order; |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+kill_dependency(_Tp __y) _NOEXCEPT |
+{ |
+ return __y; |
+} |
+ |
+// general atomic<T> |
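+//
+// The second (bool) template parameter of __atomic_base selects the layering below:
+// this false case supplies load/store/exchange/compare-exchange on top of Clang's
+// _Atomic(...) type and __c11_atomic_* builtins, and the true case (the integral
+// specialization further down) adds the fetch-and-modify operations and the
+// arithmetic operators.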
+ |
+template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value> |
+struct __atomic_base // false |
+{ |
+ mutable _Atomic(_Tp) __a_; |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool is_lock_free() const volatile _NOEXCEPT |
+ {return __c11_atomic_is_lock_free(sizeof(_Tp));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool is_lock_free() const _NOEXCEPT |
+ {return __c11_atomic_is_lock_free(sizeof(_Tp));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {__c11_atomic_store(&__a_, __d, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {__c11_atomic_store(&__a_, __d, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT |
+ {return __c11_atomic_load(&__a_, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT |
+ {return __c11_atomic_load(&__a_, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ operator _Tp() const volatile _NOEXCEPT {return load();} |
+ _LIBCPP_INLINE_VISIBILITY |
+ operator _Tp() const _NOEXCEPT {return load();} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_exchange(&__a_, __d, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_exchange(&__a_, __d, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_weak(_Tp& __e, _Tp __d, |
+ memory_order __s, memory_order __f) volatile _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_weak(_Tp& __e, _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_strong(_Tp& __e, _Tp __d, |
+ memory_order __s, memory_order __f) volatile _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_strong(_Tp& __e, _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_weak(_Tp& __e, _Tp __d, |
+ memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_weak(_Tp& __e, _Tp __d, |
+ memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_strong(_Tp& __e, _Tp __d, |
+ memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool compare_exchange_strong(_Tp& __e, _Tp __d, |
+ memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS |
+ __atomic_base() _NOEXCEPT = default; |
+#else |
+ __atomic_base() _NOEXCEPT : __a_() {} |
+#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {} |
+#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+ __atomic_base(const __atomic_base&) = delete; |
+ __atomic_base& operator=(const __atomic_base&) = delete; |
+ __atomic_base& operator=(const __atomic_base&) volatile = delete; |
+#else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+private: |
+ __atomic_base(const __atomic_base&); |
+ __atomic_base& operator=(const __atomic_base&); |
+ __atomic_base& operator=(const __atomic_base&) volatile; |
+#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+}; |
+ |
+// atomic<Integral> |
+ |
+template <class _Tp> |
+struct __atomic_base<_Tp, true> |
+ : public __atomic_base<_Tp, false> |
+{ |
+ typedef __atomic_base<_Tp, false> __base; |
+ _LIBCPP_INLINE_VISIBILITY |
+ __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT |
+ _LIBCPP_INLINE_VISIBILITY |
+ _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_add(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_add(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_and(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_and(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_or(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_or(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;} |
+}; |
+ |
+// atomic<T> |
+ |
+template <class _Tp> |
+struct atomic |
+ : public __atomic_base<_Tp> |
+{ |
+ typedef __atomic_base<_Tp> __base; |
+ _LIBCPP_INLINE_VISIBILITY |
+ atomic() _NOEXCEPT _LIBCPP_DEFAULT |
+ _LIBCPP_INLINE_VISIBILITY |
+ _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator=(_Tp __d) volatile _NOEXCEPT |
+ {__base::store(__d); return __d;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp operator=(_Tp __d) _NOEXCEPT |
+ {__base::store(__d); return __d;} |
+}; |
+ |
+// atomic<T*> |
+ |
+template <class _Tp> |
+struct atomic<_Tp*> |
+ : public __atomic_base<_Tp*> |
+{ |
+ typedef __atomic_base<_Tp*> __base; |
+ _LIBCPP_INLINE_VISIBILITY |
+ atomic() _NOEXCEPT _LIBCPP_DEFAULT |
+ _LIBCPP_INLINE_VISIBILITY |
+ _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator=(_Tp* __d) volatile _NOEXCEPT |
+ {__base::store(__d); return __d;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator=(_Tp* __d) _NOEXCEPT |
+ {__base::store(__d); return __d;} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) |
+ volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_add(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_add(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) |
+ volatile _NOEXCEPT |
+ {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;} |
+ _LIBCPP_INLINE_VISIBILITY |
+ _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;} |
+}; |
+ |
+// atomic_is_lock_free |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT |
+{ |
+ return __o->is_lock_free(); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT |
+{ |
+ return __o->is_lock_free(); |
+} |
+ |
+// atomic_init |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ __c11_atomic_init(&__o->__a_, __d); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ __c11_atomic_init(&__o->__a_, __d); |
+} |
+ |
+// atomic_store |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ __o->store(__d); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ __o->store(__d); |
+} |
+ |
+// atomic_store_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT |
+{ |
+ __o->store(__d, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT |
+{ |
+ __o->store(__d, __m); |
+} |
+ |
+// atomic_load |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT |
+{ |
+ return __o->load(); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_load(const atomic<_Tp>* __o) _NOEXCEPT |
+{ |
+ return __o->load(); |
+} |
+ |
+// atomic_load_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->load(__m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->load(__m); |
+} |
+ |
+// atomic_exchange |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->exchange(__d); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->exchange(__d); |
+} |
+ |
+// atomic_exchange_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->exchange(__d, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp |
+atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->exchange(__d, __m); |
+} |
+ |
+// atomic_compare_exchange_weak |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_weak(*__e, __d); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_weak(*__e, __d); |
+} |
+ |
+// atomic_compare_exchange_strong |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_strong(*__e, __d); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_strong(*__e, __d); |
+} |
+ |
+// atomic_compare_exchange_weak_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e, |
+ _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_weak(*__e, __d, __s, __f); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_weak(*__e, __d, __s, __f); |
+} |
+ |
+// atomic_compare_exchange_strong_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o, |
+ _Tp* __e, _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_strong(*__e, __d, __s, __f); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e, |
+ _Tp __d, |
+ memory_order __s, memory_order __f) _NOEXCEPT |
+{ |
+ return __o->compare_exchange_strong(*__e, __d, __s, __f); |
+} |
+ |
+// atomic_fetch_add |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op); |
+} |
+ |
+// atomic_fetch_add_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op, |
+ memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_add(__op, __m); |
+} |
+ |
+// atomic_fetch_sub |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op); |
+} |
+ |
+// atomic_fetch_sub_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op, |
+ memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+_Tp* |
+atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_sub(__op, __m); |
+} |
+ |
+// atomic_fetch_and |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_and(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_and(__op); |
+} |
+ |
+// atomic_fetch_and_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_and(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_and(__op, __m); |
+} |
+ |
+// atomic_fetch_or |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_or(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_or(__op); |
+} |
+ |
+// atomic_fetch_or_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_or(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_or(__op, __m); |
+} |
+ |
+// atomic_fetch_xor |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_xor(__op); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT |
+{ |
+ return __o->fetch_xor(__op); |
+} |
+ |
+// atomic_fetch_xor_explicit |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_xor(__op, __m); |
+} |
+ |
+template <class _Tp> |
+inline _LIBCPP_INLINE_VISIBILITY |
+typename enable_if |
+< |
+ is_integral<_Tp>::value && !is_same<_Tp, bool>::value, |
+ _Tp |
+>::type |
+atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->fetch_xor(__op, __m); |
+} |
+ |
+// flag type and operations |
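+//
+// atomic_flag holds a single _Atomic(bool); test_and_set atomically exchanges in
+// true and returns the previous value, and clear atomically stores false, each
+// using the caller's memory_order.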
+ |
+typedef struct atomic_flag |
+{ |
+ _Atomic(bool) __a_; |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {return __c11_atomic_exchange(&__a_, true, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {return __c11_atomic_exchange(&__a_, true, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT |
+ {__c11_atomic_store(&__a_, false, __m);} |
+ _LIBCPP_INLINE_VISIBILITY |
+ void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT |
+ {__c11_atomic_store(&__a_, false, __m);} |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS |
+ atomic_flag() _NOEXCEPT = default; |
+#else |
+ atomic_flag() _NOEXCEPT : __a_() {} |
+#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS |
+ |
+ _LIBCPP_INLINE_VISIBILITY |
+ atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} |
+ |
+#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+ atomic_flag(const atomic_flag&) = delete; |
+ atomic_flag& operator=(const atomic_flag&) = delete; |
+ atomic_flag& operator=(const atomic_flag&) volatile = delete; |
+#else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+private: |
+ atomic_flag(const atomic_flag&); |
+ atomic_flag& operator=(const atomic_flag&); |
+ atomic_flag& operator=(const atomic_flag&) volatile; |
+#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS |
+} atomic_flag; |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT |
+{ |
+ return __o->test_and_set(); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT |
+{ |
+ return __o->test_and_set(); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->test_and_set(__m); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+bool |
+atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT |
+{ |
+ return __o->test_and_set(__m); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT |
+{ |
+ __o->clear(); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_flag_clear(atomic_flag* __o) _NOEXCEPT |
+{ |
+ __o->clear(); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT |
+{ |
+ __o->clear(__m); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT |
+{ |
+ __o->clear(__m); |
+} |
+ |
+// fences |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_thread_fence(memory_order __m) _NOEXCEPT |
+{ |
+ __c11_atomic_thread_fence(__m); |
+} |
+ |
+inline _LIBCPP_INLINE_VISIBILITY |
+void |
+atomic_signal_fence(memory_order __m) _NOEXCEPT |
+{ |
+ __c11_atomic_signal_fence(__m); |
+} |
+ |
+// Atomics for standard typedef types |
+ |
+typedef atomic<bool> atomic_bool; |
+typedef atomic<char> atomic_char; |
+typedef atomic<signed char> atomic_schar; |
+typedef atomic<unsigned char> atomic_uchar; |
+typedef atomic<short> atomic_short; |
+typedef atomic<unsigned short> atomic_ushort; |
+typedef atomic<int> atomic_int; |
+typedef atomic<unsigned int> atomic_uint; |
+typedef atomic<long> atomic_long; |
+typedef atomic<unsigned long> atomic_ulong; |
+typedef atomic<long long> atomic_llong; |
+typedef atomic<unsigned long long> atomic_ullong; |
+typedef atomic<char16_t> atomic_char16_t; |
+typedef atomic<char32_t> atomic_char32_t; |
+typedef atomic<wchar_t> atomic_wchar_t; |
+ |
+typedef atomic<int_least8_t> atomic_int_least8_t; |
+typedef atomic<uint_least8_t> atomic_uint_least8_t; |
+typedef atomic<int_least16_t> atomic_int_least16_t; |
+typedef atomic<uint_least16_t> atomic_uint_least16_t; |
+typedef atomic<int_least32_t> atomic_int_least32_t; |
+typedef atomic<uint_least32_t> atomic_uint_least32_t; |
+typedef atomic<int_least64_t> atomic_int_least64_t; |
+typedef atomic<uint_least64_t> atomic_uint_least64_t; |
+ |
+typedef atomic<int_fast8_t> atomic_int_fast8_t; |
+typedef atomic<uint_fast8_t> atomic_uint_fast8_t; |
+typedef atomic<int_fast16_t> atomic_int_fast16_t; |
+typedef atomic<uint_fast16_t> atomic_uint_fast16_t; |
+typedef atomic<int_fast32_t> atomic_int_fast32_t; |
+typedef atomic<uint_fast32_t> atomic_uint_fast32_t; |
+typedef atomic<int_fast64_t> atomic_int_fast64_t; |
+typedef atomic<uint_fast64_t> atomic_uint_fast64_t; |
+ |
+typedef atomic<intptr_t> atomic_intptr_t; |
+typedef atomic<uintptr_t> atomic_uintptr_t; |
+typedef atomic<size_t> atomic_size_t; |
+typedef atomic<ptrdiff_t> atomic_ptrdiff_t; |
+typedef atomic<intmax_t> atomic_intmax_t; |
+typedef atomic<uintmax_t> atomic_uintmax_t; |
+ |
+#define ATOMIC_FLAG_INIT {false} |
+#define ATOMIC_VAR_INIT(__v) {__v} |
+ |
+// lock-free property |
+ |
+#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE |
+#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE |
+#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE |
+#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE |
+#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE |
+#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE |
+#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE |
+#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE |
+#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE |
+#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE |
+ |
+#endif // !__has_feature(cxx_atomic) |
+ |
+_LIBCPP_END_NAMESPACE_STD |
+ |
+#endif // _LIBCPP_ATOMIC |