Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
| 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
| 7 * are met: | 7 * are met: |
| 8 * | 8 * |
| 9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
| 10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND | 24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
| 25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF | 26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
| 27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 28 */ | 28 */ |
| 29 | 29 |
| 30 #ifndef Atomics_h | 30 #ifndef Atomics_h |
| 31 #define Atomics_h | 31 #define Atomics_h |
| 32 | 32 |
| 33 #include "wtf/Assertions.h" | 33 #include "wtf/Assertions.h" |
| 34 #include "wtf/CPU.h" | |
| 34 | 35 |
| 35 #include <stdint.h> | 36 #include <stdint.h> |
| 36 | 37 |
| 37 #if COMPILER(MSVC) | 38 #if COMPILER(MSVC) |
| 38 #include <windows.h> | 39 #include <windows.h> |
| 39 #endif | 40 #endif |
| 40 | 41 |
| 42 #if defined(THREAD_SANITIZER) | |
| 43 #include <sanitizer/tsan_interface_atomic.h> | |
| 44 #endif | |
| 45 | |
| 41 namespace WTF { | 46 namespace WTF { |
| 42 | 47 |
| 43 #if COMPILER(MSVC) | 48 #if COMPILER(MSVC) |
| 44 | 49 |
| 45 // atomicAdd returns the result of the addition. | 50 // atomicAdd returns the result of the addition. |
| 46 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) | 51 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) |
| 47 { | 52 { |
| 48 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), stat ic_cast<long>(increment)) + increment; | 53 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), stat ic_cast<long>(increment)) + increment; |
| 49 } | 54 } |
| 50 | 55 |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 91 int ret = __sync_lock_test_and_set(ptr, 1); | 96 int ret = __sync_lock_test_and_set(ptr, 1); |
| 92 ASSERT(!ret || ret == 1); | 97 ASSERT(!ret || ret == 1); |
| 93 return ret; | 98 return ret; |
| 94 } | 99 } |
| 95 | 100 |
| 96 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) | 101 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) |
| 97 { | 102 { |
| 98 ASSERT(*ptr == 1); | 103 ASSERT(*ptr == 1); |
| 99 __sync_lock_release(ptr); | 104 __sync_lock_release(ptr); |
| 100 } | 105 } |
| 106 #endif | |
| 107 | |
| 108 #if defined(THREAD_SANITIZER) | |
| 109 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | |
| 110 { | |
| 111 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); | |
| 112 } | |
| 113 | |
| 114 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | |
| 115 { | |
| 116 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); | |
| 117 } | |
| 118 #else | |
| 119 | |
| 120 #if CPU(X86) || CPU(X86_64) | |
| 121 // Only compiler barrier is needed. | |
| 122 #if COMPILER(MSVC) | |
| 123 // Starting from Visual Studio 2005 compiler guarantees acquire and release | |
| 124 // semantics for operations on volatile variables. See MSDN entry for | |
| 125 // MemoryBarrier macro. | |
| 126 #define MEMORY_BARRIER() | |
| 127 #else | |
| 128 #define MEMORY_BARRIER() __asm__ __volatile__("" : : : "memory") | |
| 129 #endif | |
| 130 #elif OS(LINUX) || OS(ANDROID) | |
|
Alexander Potapenko
2014/03/17 08:02:28
Here you implicitly assume CPU(ARM). This may break on other architectures.
Vyacheslav Egorov (Chromium)
2014/03/17 12:00:41
The comment assumes ARM indeed but the code itself does not.
Alexander Potapenko
2014/03/17 12:09:58
Not sure how this can work on Android MIPS (or any other non-ARM architecture).
Vyacheslav Egorov (Chromium)
2014/03/17 12:16:07
I misread the docs (https://www.kernel.org/doc/Documentation/arm/kernel_user_helpers.txt)
| |
| 131 // On ARM __sync_synchronize generates dmb which is very expensive on single | |
| 132 // core devices which don't actually need it. Avoid the cost by calling into | |
| 133 // kuser_memory_barrier helper. | |
| 134 inline void memoryBarrier() | |
| 135 { | |
| 136 // Note: This is a function call, which is also an implicit compiler barrier . | |
|
Alexander Potapenko
2014/03/17 08:02:28
If the 80-column limit applies here, please fix.
Vyacheslav Egorov (Chromium)
2014/03/17 12:00:41
(Fortunately) Blink does not have an 80-column limit.
| |
| 137 typedef void (*KernelMemoryBarrierFunc)(); | |
| 138 ((KernelMemoryBarrierFunc)0xffff0fa0)(); | |
| 139 } | |
| 140 #define MEMORY_BARRIER() memoryBarrier() | |
| 141 #else | |
| 142 #define MEMORY_BARRIER() __sync_synchronize() | |
| 143 #endif | |
| 144 | |
| 145 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | |
| 146 { | |
| 147 MEMORY_BARRIER(); | |
| 148 *ptr = value; | |
| 149 } | |
| 150 | |
| 151 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | |
| 152 { | |
| 153 int value = *ptr; | |
| 154 MEMORY_BARRIER(); | |
| 155 return value; | |
| 156 } | |
| 157 | |
| 158 #undef MEMORY_BARRIER | |
| 101 | 159 |
| 102 #endif | 160 #endif |
| 103 | 161 |
| 104 } // namespace WTF | 162 } // namespace WTF |
| 105 | 163 |
| 106 using WTF::atomicAdd; | 164 using WTF::atomicAdd; |
| 107 using WTF::atomicSubtract; | 165 using WTF::atomicSubtract; |
| 108 using WTF::atomicDecrement; | 166 using WTF::atomicDecrement; |
| 109 using WTF::atomicIncrement; | 167 using WTF::atomicIncrement; |
| 110 using WTF::atomicTestAndSetToOne; | 168 using WTF::atomicTestAndSetToOne; |
| 111 using WTF::atomicSetOneToZero; | 169 using WTF::atomicSetOneToZero; |
| 170 using WTF::acquireLoad; | |
| 171 using WTF::releaseStore; | |
| 112 | 172 |
| 113 #endif // Atomics_h | 173 #endif // Atomics_h |
| OLD | NEW |