OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #ifndef SkAtomics_win_DEFINED | 8 #ifndef SkAtomics_win_DEFINED |
9 #define SkAtomics_win_DEFINED | 9 #define SkAtomics_win_DEFINED |
10 | 10 |
(...skipping 16 matching lines...) Expand all Loading... |
27 | 27 |
28 static inline int64_t sk_atomic_inc(int64_t* addr) { | 28 static inline int64_t sk_atomic_inc(int64_t* addr) { |
29 // InterlockedIncrement returns the new value, we want to return the old. | 29 // InterlockedIncrement returns the new value, we want to return the old. |
30 return InterlockedIncrement64(addr) - 1; | 30 return InterlockedIncrement64(addr) - 1; |
31 } | 31 } |
32 | 32 |
33 static inline int32_t sk_atomic_add(int32_t* addr, int32_t inc) { | 33 static inline int32_t sk_atomic_add(int32_t* addr, int32_t inc) { |
34 return _InterlockedExchangeAdd(reinterpret_cast<long*>(addr), static_cast<lo
ng>(inc)); | 34 return _InterlockedExchangeAdd(reinterpret_cast<long*>(addr), static_cast<lo
ng>(inc)); |
35 } | 35 } |
36 | 36 |
37 static inline int64_t sk_atomic_add(int64_t* addr, int64_t inc) { | |
38 return InterlockedExchangeAdd64(addr, inc); | |
39 } | |
40 | |
41 static inline int32_t sk_atomic_dec(int32_t* addr) { | 37 static inline int32_t sk_atomic_dec(int32_t* addr) { |
42 // InterlockedDecrement returns the new value, we want to return the old. | 38 // InterlockedDecrement returns the new value, we want to return the old. |
43 return _InterlockedDecrement(reinterpret_cast<long*>(addr)) + 1; | 39 return _InterlockedDecrement(reinterpret_cast<long*>(addr)) + 1; |
44 } | 40 } |
45 | 41 |
46 static inline int64_t sk_atomic_dec(int64_t* addr) { | |
47 // InterlockedDecrement returns the new value, we want to return the old. | |
48 return InterlockedDecrement64(addr) + 1; | |
49 } | |
50 | |
// No-op on Windows: presumably the full barrier implied by the preceding
// Interlocked* decrement already provides acquire semantics — TODO confirm
// (NOTE(review): this assumption should be re-checked for ARM targets).
static inline void sk_membar_acquire__after_atomic_dec() { }
52 | 43 |
53 static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) { | 44 static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) { |
54 return _InterlockedCompareExchange(reinterpret_cast<long*>(addr), after, bef
ore) == before; | 45 return _InterlockedCompareExchange(reinterpret_cast<long*>(addr), after, bef
ore) == before; |
55 } | 46 } |
56 | 47 |
57 static inline bool sk_atomic_cas(int64_t* addr, int64_t before, int64_t after) { | |
58 return _InterlockedCompareExchange64(addr, after, before) == before; | |
59 } | |
60 | |
61 static inline void* sk_atomic_cas(void** addr, void* before, void* after) { | 48 static inline void* sk_atomic_cas(void** addr, void* before, void* after) { |
62 return InterlockedCompareExchangePointer(addr, after, before); | 49 return InterlockedCompareExchangePointer(addr, after, before); |
63 } | 50 } |
64 | 51 |
// No-op on Windows: presumably the Interlocked* operation performed by the
// conditional increment already carries the required acquire ordering —
// TODO confirm (NOTE(review): verify this holds on weakly-ordered targets).
static inline void sk_membar_acquire__after_atomic_conditional_inc() { }
66 | 53 |
67 #endif | 54 #endif |
OLD | NEW |