OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2013 Google Inc. | 2 * Copyright 2014 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #ifndef SkAtomics_sync_DEFINED | 8 #ifndef SkAtomics_mips32_DEFINED |
9 #define SkAtomics_sync_DEFINED | 9 #define SkAtomics_mips32_DEFINED |
10 | 10 |
11 /** GCC/Clang __sync based atomics. */ | 11 /** Similar to SkAtomics_sync but the 32-bit MIPS toolchains for the android |
12 * framework are missing support for __sync* functions that operate on 64-bit | |
13 * values. Instead these functions use __atomic to work around those specific | |
scroggo
2014/07/16 19:16:25
use*
| |
14 * issues until we can move to <stdatomic.h>. | |
15 */ | |
12 | 16 |
13 #include <stdint.h> | 17 #include <stdint.h> |
14 | 18 |
15 static inline __attribute__((always_inline)) int32_t sk_atomic_inc(int32_t* addr ) { | 19 static inline __attribute__((always_inline)) int32_t sk_atomic_inc(int32_t* addr ) { |
16 return __sync_fetch_and_add(addr, 1); | 20 return __sync_fetch_and_add(addr, 1); |
17 } | 21 } |
18 | 22 |
19 static inline __attribute__((always_inline)) int64_t sk_atomic_inc(int64_t* addr ) { | 23 static inline __attribute__((always_inline)) int64_t sk_atomic_inc(int64_t* addr ) { |
20 return __sync_fetch_and_add(addr, 1); | 24 return __atomic_fetch_add(addr, 1, __ATOMIC_SEQ_CST); |
21 } | 25 } |
22 | 26 |
23 static inline __attribute__((always_inline)) int32_t sk_atomic_add(int32_t* addr , int32_t inc) { | 27 static inline __attribute__((always_inline)) int32_t sk_atomic_add(int32_t* addr , int32_t inc) { |
24 return __sync_fetch_and_add(addr, inc); | 28 return __sync_fetch_and_add(addr, inc); |
25 } | 29 } |
26 | 30 |
27 static inline __attribute__((always_inline)) int64_t sk_atomic_add(int64_t* addr , int64_t inc) { | 31 static inline __attribute__((always_inline)) int64_t sk_atomic_add(int64_t* addr , int64_t inc) { |
28 return __sync_fetch_and_add(addr, inc); | 32 return __sync_fetch_and_add(addr, inc); |
29 } | 33 } |
30 | 34 |
31 static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr ) { | 35 static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr ) { |
32 return __sync_fetch_and_add(addr, -1); | 36 return __sync_fetch_and_add(addr, -1); |
33 } | 37 } |
34 | 38 |
35 static inline __attribute__((always_inline)) int64_t sk_atomic_dec(int64_t* addr ) { | 39 static inline __attribute__((always_inline)) int64_t sk_atomic_dec(int64_t* addr ) { |
36 return __sync_fetch_and_add(addr, -1); | 40 return __atomic_fetch_add(addr, -1, __ATOMIC_SEQ_CST); |
37 } | 41 } |
38 | 42 |
39 static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() { } | 43 static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() { }
40 | 44 |
41 static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr, | 45 static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr, |
42 int32_t before, | 46 int32_t before, |
43 int32_t after) { | 47 int32_t after) { |
44 return __sync_bool_compare_and_swap(addr, before, after); | 48 return __sync_bool_compare_and_swap(addr, before, after); |
45 } | 49 } |
46 | 50 |
47 static inline __attribute__((always_inline)) bool sk_atomic_cas(int64_t* addr, | 51 static inline __attribute__((always_inline)) bool sk_atomic_cas(int64_t* addr, |
48 int64_t before, | 52 int64_t before, |
49 int64_t after) { | 53 int64_t after) { |
50 return __sync_bool_compare_and_swap(addr, before, after); | 54 return __atomic_compare_exchange_n(addr, &before, after, |
55 false /*strong*/, | |
56 __ATOMIC_SEQ_CST /*on success*/, | |
57 __ATOMIC_SEQ_CST /*on failure*/); | |
51 } | 58 } |
52 | 59 |
53 static inline __attribute__((always_inline)) void* sk_atomic_cas(void** addr, | 60 static inline __attribute__((always_inline)) void* sk_atomic_cas(void** addr, |
54 void* before, | 61 void* before, |
55 void* after) { | 62 void* after) { |
56 return __sync_val_compare_and_swap(addr, before, after); | 63 return __sync_val_compare_and_swap(addr, before, after); |
57 } | 64 } |
58 | 65 |
59 static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_conditional_inc() { } | 66 static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_conditional_inc() { }
60 | 67 |
61 #endif | 68 #endif |
OLD | NEW |