| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2006 The Android Open Source Project | 2 * Copyright 2006 The Android Open Source Project |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef SkThread_DEFINED | 8 #ifndef SkThread_DEFINED |
| 9 #define SkThread_DEFINED | 9 #define SkThread_DEFINED |
| 10 | 10 |
| 11 #include "SkTypes.h" | 11 #include "SkTypes.h" |
| 12 | 12 |
| 13 // SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations. | 13 // SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations. |
| 14 | 14 |
| 15 /** Atomically adds one to the int referenced by addr and returns the previous value. | 15 /** Atomically adds one to the int referenced by addr and returns the previous value. |
| 16 * No additional memory barrier is required; this must act as a compiler barrier. | 16 * No additional memory barrier is required; this must act as a compiler barrier. |
| 17 */ | 17 */ |
| 18 static int32_t sk_atomic_inc(int32_t* addr); | 18 static int32_t sk_atomic_inc(int32_t* addr); |
| 19 static int64_t sk_atomic_inc(int64_t* addr); |
| 19 | 20 |
| 20 /** Atomically adds inc to the int referenced by addr and returns the previous value. | 21 /** Atomically adds inc to the int referenced by addr and returns the previous value. |
| 21 * No additional memory barrier is required; this must act as a compiler barrier. | 22 * No additional memory barrier is required; this must act as a compiler barrier. |
| 22 */ | 23 */ |
| 23 static int32_t sk_atomic_add(int32_t* addr, int32_t inc); | 24 static int32_t sk_atomic_add(int32_t* addr, int32_t inc); |
| 25 static int64_t sk_atomic_add(int64_t* addr, int64_t inc); |
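The declarations above only state the contract; SK_ATOMICS_PLATFORM_H supplies the bodies. As a sketch of how a port could satisfy sk_atomic_inc/sk_atomic_add (assuming a GCC/Clang-style toolchain; this is not any particular Skia platform header), the __sync builtins return the previous value and act as at least compiler barriers:

```cpp
// Sketch only: one way a platform header could meet the contract above,
// assuming the GCC/Clang __sync builtins are available.
static inline int32_t sk_atomic_inc(int32_t* addr) { return __sync_fetch_and_add(addr, 1); }
static inline int64_t sk_atomic_inc(int64_t* addr) { return __sync_fetch_and_add(addr, 1); }
static inline int32_t sk_atomic_add(int32_t* addr, int32_t inc) { return __sync_fetch_and_add(addr, inc); }
static inline int64_t sk_atomic_add(int64_t* addr, int64_t inc) { return __sync_fetch_and_add(addr, inc); }
```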
| 24 | 26 |
| 25 /** Atomically subtracts one from the int referenced by addr and returns the previous value. | 27 /** Atomically subtracts one from the int referenced by addr and returns the previous value. |
| 26 * This must act as a release (SL/S) memory barrier and as a compiler barrier. | 28 * This must act as a release (SL/S) memory barrier and as a compiler barrier. |
| 27 */ | 29 */ |
| 28 static int32_t sk_atomic_dec(int32_t* addr); | 30 static int32_t sk_atomic_dec(int32_t* addr); |
| 31 static int64_t sk_atomic_dec(int64_t* addr); |
| 29 | 32 |
| 30 /** Atomic compare and set. | 33 /** Atomic compare and set. |
| 31 * If *addr == before, set *addr to after and return true, otherwise return false. | 34 * If *addr == before, set *addr to after and return true, otherwise return false. |
| 32 * This must act as a release (SL/S) memory barrier and as a compiler barrier. | 35 * This must act as a release (SL/S) memory barrier and as a compiler barrier. |
| 33 */ | 36 */ |
| 34 static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after); | 37 static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after); |
| 38 static bool sk_atomic_cas(int64_t* addr, int64_t before, int64_t after); |
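sk_atomic_dec and sk_atomic_cas additionally require release semantics. GCC documents the __sync builtins as full barriers, which is stronger than required, so the same hedged sketch extends naturally (again an assumption, not a specific port's header):

```cpp
// Sketch only: the __sync builtins are full barriers, which subsumes the
// release (SL/S) requirement stated above.
static inline int32_t sk_atomic_dec(int32_t* addr) { return __sync_fetch_and_add(addr, -1); }
static inline int64_t sk_atomic_dec(int64_t* addr) { return __sync_fetch_and_add(addr, -1); }
static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) {
    return __sync_bool_compare_and_swap(addr, before, after);
}
static inline bool sk_atomic_cas(int64_t* addr, int64_t before, int64_t after) {
    return __sync_bool_compare_and_swap(addr, before, after);
}
```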
| 35 | 39 |
| 36 /** If sk_atomic_dec does not act as an acquire (L/SL) barrier, | 40 /** If sk_atomic_dec does not act as an acquire (L/SL) barrier, |
| 37 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. | 41 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. |
| 38 */ | 42 */ |
| 39 static void sk_membar_acquire__after_atomic_dec(); | 43 static void sk_membar_acquire__after_atomic_dec(); |
| 40 | 44 |
| 41 /** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier, | 45 /** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier, |
| 42 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. | 46 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. |
| 43 */ | 47 */ |
| 44 static void sk_membar_acquire__after_atomic_conditional_inc(); | 48 static void sk_membar_acquire__after_atomic_conditional_inc(); |
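Under that full-barrier sketch, both membar hooks may legally be empty; a port whose dec/cas are genuinely weaker would need a real acquire fence instead:

```cpp
// Sketch only: no-ops are correct when sk_atomic_dec/sk_atomic_cas already
// act as full barriers, as in the __sync-based sketch above.
static inline void sk_membar_acquire__after_atomic_dec() {}
static inline void sk_membar_acquire__after_atomic_conditional_inc() {}
// A weaker port would issue an acquire fence here instead, e.g.
// __sync_synchronize().
```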
| 45 | 49 |
| 46 #include SK_ATOMICS_PLATFORM_H | 50 #include SK_ATOMICS_PLATFORM_H |
| 47 | 51 |
| 48 /** Atomically adds one to the int referenced by addr iff the referenced int was not 0 | 52 /** Atomically adds one to the int referenced by addr iff the referenced int was not 0 |
| 49 * and returns the previous value. | 53 * and returns the previous value. |
| 50 * No additional memory barrier is required; this must act as a compiler barrier. | 54 * No additional memory barrier is required; this must act as a compiler barrier. |
| 51 */ | 55 */ |
| 52 static inline int32_t sk_atomic_conditional_inc(int32_t* addr) { | 56 template<typename INT_TYPE> static inline INT_TYPE sk_atomic_conditional_inc(INT_TYPE* addr) { |
| 53 int32_t prev; | 57 INT_TYPE prev; |
| 54 do { | 58 do { |
| 55 prev = *addr; | 59 prev = *addr; |
| 56 if (0 == prev) { | 60 if (0 == prev) { |
| 57 break; | 61 break; |
| 58 } | 62 } |
| 59 } while (!sk_atomic_cas(addr, prev, prev+1)); | 63 } while (!sk_atomic_cas(addr, prev, prev+1)); |
| 60 return prev; | 64 return prev; |
| 61 } | 65 } |
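The loop implements the usual "increment only if still nonzero" pattern: it re-reads the current value, gives up if it ever observes 0, and otherwise retries the CAS until the increment lands. A hypothetical caller (try_ref and gCount are illustrative names, not part of this header):

```cpp
// Hypothetical usage: promote a weak reference to a strong one.
static int32_t gCount;  // illustrative shared counter

bool try_ref() {
    // A return of 0 means the count was already 0 (object dead); any
    // nonzero return means we successfully took a reference.
    return sk_atomic_conditional_inc(&gCount) != 0;
}
```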
| 62 | 66 |
| 63 // SK_BARRIERS_PLATFORM_H must provide implementations for the following declarations: | 67 // SK_BARRIERS_PLATFORM_H must provide implementations for the following declarations: |
| (...skipping 70 matching lines...) |
| 134 SkASSERT(fMutex); | 138 SkASSERT(fMutex); |
| 135 fMutex->assertHeld(); | 139 fMutex->assertHeld(); |
| 136 } | 140 } |
| 137 | 141 |
| 138 private: | 142 private: |
| 139 SkBaseMutex* fMutex; | 143 SkBaseMutex* fMutex; |
| 140 }; | 144 }; |
| 141 #define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire) | 145 #define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire) |
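The function-like macro makes the easy misuse `SkAutoMutexAcquire(&mu);`, which creates an unnamed temporary that unlocks again immediately, a compile error via SK_REQUIRE_LOCAL_VAR, while the named-variable form is unaffected (a function-like macro only expands when the name is followed by a parenthesis). A usage sketch; gMutex is an illustrative name, and the constructor form is assumed from the fMutex pointer member:

```cpp
// Usage sketch, assuming a constructor taking SkBaseMutex*.
void frob_shared_state(SkBaseMutex* gMutex) {
    SkAutoMutexAcquire lock(gMutex);  // OK: named local holds the lock
    // ... critical section ...
}   // lock released when 'lock' goes out of scope
// SkAutoMutexAcquire(gMutex);  // rejected at compile time by the macro
```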
| 142 | 146 |
| 143 #endif | 147 #endif |