OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #ifndef SkAtomics_sync_DEFINED | 8 #ifndef SkAtomics_sync_DEFINED |
9 #define SkAtomics_sync_DEFINED | 9 #define SkAtomics_sync_DEFINED |
10 | 10 |
11 /** GCC/Clang __sync based atomics. */ | 11 /** GCC/Clang __sync based atomics. */ |
12 | 12 |
13 #include <stdint.h> | 13 #include <stdint.h> |
14 | 14 |
/** Atomically increment *addr by one.
 *  Returns the value *addr held immediately before the increment.
 *  The __sync builtins act as full memory barriers (per GCC docs).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_inc(int32_t* addr) {
    const int32_t prior = __sync_fetch_and_add(addr, 1);
    return prior;
}
18 | 18 |
/** 64-bit overload of sk_atomic_inc: atomically increment *addr by one
 *  and return the pre-increment value (full barrier).
 */
static inline __attribute__((always_inline)) int64_t sk_atomic_inc(int64_t* addr) {
    const int64_t prior = __sync_fetch_and_add(addr, 1);
    return prior;
}
| 22 |
/** Atomically add inc to *addr.
 *  Returns the value *addr held immediately before the addition (full barrier).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_add(int32_t* addr, int32_t inc) {
    const int32_t prior = __sync_fetch_and_add(addr, inc);
    return prior;
}
22 | 26 |
/** 64-bit overload of sk_atomic_add: atomically add inc to *addr and
 *  return the pre-addition value (full barrier).
 */
static inline __attribute__((always_inline)) int64_t sk_atomic_add(int64_t* addr, int64_t inc) {
    const int64_t prior = __sync_fetch_and_add(addr, inc);
    return prior;
}
| 30 |
/** Atomically decrement *addr by one.
 *  Returns the value *addr held immediately before the decrement (full barrier).
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr) {
    // fetch_and_sub(addr, 1) is equivalent to fetch_and_add(addr, -1).
    return __sync_fetch_and_sub(addr, 1);
}
26 | 34 |
/** 64-bit overload of sk_atomic_dec: atomically decrement *addr by one
 *  and return the pre-decrement value (full barrier).
 */
static inline __attribute__((always_inline)) int64_t sk_atomic_dec(int64_t* addr) {
    // fetch_and_sub(addr, 1) is equivalent to fetch_and_add(addr, -1).
    return __sync_fetch_and_sub(addr, 1);
}
| 38 |
/** Intentionally a no-op: the __sync builtins used above already act as
 *  full memory barriers (per GCC docs), so no additional acquire fence
 *  is required after sk_atomic_dec().
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() {
}
28 | 40 |
/** Atomic compare-and-swap: if *addr == before, set *addr to after.
 *  Returns true iff the swap was performed (full barrier).
 */
static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr,
                                                                int32_t before,
                                                                int32_t after) {
    // The swap happened iff the value observed in *addr matched 'before'.
    return __sync_val_compare_and_swap(addr, before, after) == before;
}
34 | 46 |
/** 64-bit overload of sk_atomic_cas: if *addr == before, set *addr to
 *  after. Returns true iff the swap was performed (full barrier).
 */
static inline __attribute__((always_inline)) bool sk_atomic_cas(int64_t* addr,
                                                                int64_t before,
                                                                int64_t after) {
    // The swap happened iff the value observed in *addr matched 'before'.
    return __sync_val_compare_and_swap(addr, before, after) == before;
}
| 52 |
/** Pointer compare-and-swap: if *addr == before, set *addr to after.
 *  Returns the value *addr held before the operation, so the swap
 *  succeeded iff the return value equals 'before' (full barrier).
 */
static inline __attribute__((always_inline)) void* sk_atomic_cas(void** addr,
                                                                 void* before,
                                                                 void* after) {
    void* prior = __sync_val_compare_and_swap(addr, before, after);
    return prior;
}
40 | 58 |
/** Intentionally a no-op: the __sync builtins used above already act as
 *  full memory barriers (per GCC docs), so no additional acquire fence
 *  is required after a conditional increment.
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_conditional_inc() {
}
42 | 60 |
43 #endif | 61 #endif |
OLD | NEW |