Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef SkAtomics_sync_DEFINED | 8 #ifndef SkAtomics_sync_DEFINED |
| 9 #define SkAtomics_sync_DEFINED | 9 #define SkAtomics_sync_DEFINED |
| 10 | 10 |
| 11 /** GCC/Clang __sync based atomics. */ | 11 /** GCC/Clang __sync based atomics. */ |
| 12 | 12 |
| 13 #include <stdint.h> | 13 #include <stdint.h> |
| 14 | 14 |
/** Atomically add 1 to *addr; returns the value *addr held before the add. */
static inline __attribute__((always_inline)) int32_t sk_atomic_inc(int32_t* addr) {
    const int32_t prior = __sync_fetch_and_add(addr, 1);
    return prior;
}
| 18 | 18 |
/** Atomically add 1 to *addr; returns the value *addr held before the add.
 *
 *  The 32-bit MIPS toolchain is missing support for __sync* functions that
 *  operate on 64-bit values, so fall back to the __atomic* builtins there
 *  until everything can move to <stdatomic.h>.
 *
 *  NOTE(review): per reviewer request, the guard no longer tests
 *  SK_BUILD_FOR_ANDROID_FRAMEWORK — the missing 64-bit __sync support
 *  applies to 32-bit MIPS builds generally, not just the framework build.
 */
static inline __attribute__((always_inline)) int64_t sk_atomic_inc(int64_t* addr) {
#if defined(__mips__) && !defined(__LP64__)
    return __atomic_fetch_add(addr, 1, __ATOMIC_SEQ_CST);
#else
    return __sync_fetch_and_add(addr, 1);
#endif
}
| 22 | 30 |
/** Atomically add inc to *addr; returns the value *addr held before the add. */
static inline __attribute__((always_inline)) int32_t sk_atomic_add(int32_t* addr, int32_t inc) {
    const int32_t prior = __sync_fetch_and_add(addr, inc);
    return prior;
}
| 26 | 34 |
/** Atomically subtract 1 from *addr; returns the value *addr held before the
 *  subtraction. Implemented as a fetch-and-add of -1.
 */
static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr) {
    const int32_t prior = __sync_fetch_and_add(addr, -1);
    return prior;
}
| 30 | 38 |
/** No-op: the __sync* builtins used above already act as full memory
 *  barriers, so no additional acquire fence is needed after sk_atomic_dec.
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() {}
| 32 | 40 |
/** Atomic compare-and-swap: if *addr equals before, store after into *addr.
 *  Returns true when the swap happened, false when *addr had changed.
 */
static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr,
                                                                int32_t before,
                                                                int32_t after) {
    const bool swapped = __sync_bool_compare_and_swap(addr, before, after);
    return swapped;
}
| 38 | 46 |
/** Atomic compare-and-swap for pointers: if *addr equals before, store after
 *  into *addr. Returns the value *addr held before the operation (equal to
 *  before exactly when the swap happened).
 */
static inline __attribute__((always_inline)) void* sk_atomic_cas(void** addr,
                                                                 void* before,
                                                                 void* after) {
    void* const prior = __sync_val_compare_and_swap(addr, before, after);
    return prior;
}
| 44 | 52 |
/** No-op: the __sync* compare-and-swap builtins above already act as full
 *  memory barriers, so no extra acquire fence is needed after a conditional
 *  increment.
 */
static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_conditional_inc() {}
| 46 | 54 |
| 47 #endif | 55 #endif |
| OLD | NEW |