OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
7 * are met: | 7 * are met: |
8 * | 8 * |
9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
(...skipping 60 matching lines...)
71 ASSERT(!ret || ret == 1); | 71 ASSERT(!ret || ret == 1); |
72 return ret; | 72 return ret; |
73 } | 73 } |
74 | 74 |
75 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) | 75 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) |
76 { | 76 { |
77 ASSERT(*ptr == 1); | 77 ASSERT(*ptr == 1); |
78 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0); | 78 InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0); |
79 } | 79 } |
80 | 80 |
| 81 ALWAYS_INLINE void* atomicTestAndSwap(void* volatile* ptr, void* comparand, void* replacement) |
| 82 { |
| 83 return InterlockedCompareExchangePointer(ptr, replacement, comparand); |
| 84 } |
| 85 |
| 86 ALWAYS_INLINE void* atomicExchange(void* volatile* ptr, void* value) |
| 87 { |
| 88 return InterlockedExchangePointer(ptr, value); |
| 89 } |
| 90 |
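Review note, not part of the patch: InterlockedCompareExchangePointer returns the destination's initial value, so a caller of atomicTestAndSwap detects success by checking whether the returned pointer equals the comparand it passed in. A minimal usage sketch, relying on static zero-initialization; g_instance and tryPublish are hypothetical names:

    void* g_instance;

    // Install |candidate| only if no other thread has published a pointer yet.
    // Returns null on success, or the pointer the winning thread installed.
    void* tryPublish(void* candidate)
    {
        return atomicTestAndSwap(&g_instance, 0, candidate);
    }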
81 #else | 91 #else |
82 | 92 |
83 // atomicAdd returns the result of the addition. | 93 // atomicAdd returns the result of the addition. |
84 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); } | 94 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); } |
85 // atomicSubtract returns the result of the subtraction. | 95 // atomicSubtract returns the result of the subtraction. |
86 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); } | 96 ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); } |
87 | 97 |
88 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); } | 98 ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); } |
89 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); } | 99 ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); } |
90 | 100 |
91 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); } | 101 ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); } |
92 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); } | 102 ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); } |
93 | 103 |
94 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) | 104 ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr) |
95 { | 105 { |
96 int ret = __sync_lock_test_and_set(ptr, 1); | 106 int ret = __sync_lock_test_and_set(ptr, 1); |
97 ASSERT(!ret || ret == 1); | 107 ASSERT(!ret || ret == 1); |
98 return ret; | 108 return ret; |
99 } | 109 } |
100 | 110 |
101 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) | 111 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) |
102 { | 112 { |
103 ASSERT(*ptr == 1); | 113 ASSERT(*ptr == 1); |
104 __sync_lock_release(ptr); | 114 __sync_lock_release(ptr); |
105 } | 115 } |
| 116 |
| 117 ALWAYS_INLINE void* atomicTestAndSwap(void* volatile* ptr, void* comparand, void* replacement) |
| 118 { |
| 119 return __sync_val_compare_and_swap(ptr, comparand, replacement); |
| 120 } |
| 121 |
| 122 ALWAYS_INLINE void* atomicExchange(void* volatile* ptr, void* value) |
| 123 { |
| 124 return __sync_lock_test_and_set(ptr, value); |
| 125 } |
| 126 |
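Review note, not part of the patch: GCC documents __sync_lock_test_and_set as an acquire barrier rather than a full barrier, so atomicExchange on this path is weaker than InterlockedExchangePointer, which implies a full fence on Windows. If full-fence semantics were ever needed, a sketch using the newer __atomic builtins could look like the following (assumes a GCC 4.7+/Clang toolchain; atomicFullExchange is a hypothetical name, not something this patch adds):

    ALWAYS_INLINE void* atomicFullExchange(void* volatile* ptr, void* value)
    {
        // __ATOMIC_SEQ_CST requests a sequentially consistent (full-barrier) swap.
        return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST);
    }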
106 #endif | 127 #endif |
107 | 128 |
108 #if defined(THREAD_SANITIZER) | 129 #if defined(THREAD_SANITIZER) |
109 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | 130 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) |
110 { | 131 { |
111 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); | 132 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); |
112 } | 133 } |
113 | 134 |
114 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 135 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
115 { | 136 { |
(...skipping 40 matching lines...)
156 return value; | 177 return value; |
157 } | 178 } |
158 | 179 |
159 #undef MEMORY_BARRIER | 180 #undef MEMORY_BARRIER |
160 | 181 |
161 #endif | 182 #endif |
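Review note, not part of the patch: releaseStore and acquireLoad implement the usual publication handshake. A writer fills in its data and then release-stores a flag; a reader that acquire-loads the flag as set is guaranteed to observe the data written before the store. A minimal sketch; g_data, g_ready, produce, and tryConsume are hypothetical names:

    int g_data;
    int g_ready; // statics are zero-initialized

    void produce()
    {
        g_data = 42; // plain write, made visible by the release store below
        releaseStore(&g_ready, 1);
    }

    bool tryConsume(int* out)
    {
        if (!acquireLoad(&g_ready))
            return false;
        *out = g_data; // safe: this acquire load pairs with the release store
        return true;
    }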
162 | 183 |
163 } // namespace WTF | 184 } // namespace WTF |
164 | 185 |
165 using WTF::atomicAdd; | 186 using WTF::atomicAdd; |
166 using WTF::atomicSubtract; | |
167 using WTF::atomicDecrement; | 187 using WTF::atomicDecrement; |
| 188 using WTF::atomicExchange; |
168 using WTF::atomicIncrement; | 189 using WTF::atomicIncrement; |
169 using WTF::atomicTestAndSetToOne; | 190 using WTF::atomicTestAndSetToOne; |
| 191 using WTF::atomicTestAndSwap; |
| 192 using WTF::atomicSubtract; |
170 using WTF::atomicSetOneToZero; | 193 using WTF::atomicSetOneToZero; |
171 using WTF::acquireLoad; | 194 using WTF::acquireLoad; |
172 using WTF::releaseStore; | 195 using WTF::releaseStore; |
173 | 196 |
174 #endif // Atomics_h | 197 #endif // Atomics_h |
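Review note, not part of the patch: atomicTestAndSetToOne and atomicSetOneToZero are the natural building blocks of a test-and-set spinlock; the acquire semantics of the test-and-set and the release semantics of the reset are exactly what lock and unlock need. A sketch of a caller-side wrapper; SpinLock is a hypothetical class, not part of this header:

    class SpinLock {
    public:
        SpinLock() : m_flag(0) { }

        void lock()
        {
            // Spin until this thread is the one that flips the flag from 0 to 1.
            while (atomicTestAndSetToOne(&m_flag)) { }
        }

        void unlock()
        {
            atomicSetOneToZero(&m_flag);
        }

    private:
        int volatile m_flag;
    };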