/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkThread_DEFINED
#define SkThread_DEFINED

#include "SkTypes.h"

// SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations.

/** Atomically adds one to the int referenced by addr and returns the previous value.
 *  No additional memory barrier is required; this must act as a compiler barrier.
 */
static int32_t sk_atomic_inc(int32_t* addr);

/** Atomically adds inc to the int referenced by addr and returns the previous value.
 *  No additional memory barrier is required; this must act as a compiler barrier.
 */
static int32_t sk_atomic_add(int32_t* addr, int32_t inc);

/** Atomically subtracts one from the int referenced by addr and returns the previous value.
 *  This must act as a release (SL/S) memory barrier and as a compiler barrier.
 */
static int32_t sk_atomic_dec(int32_t* addr);

/** Atomic compare and set.
 *  If *addr == before, set *addr to after and return true, otherwise return false.
 *  This must act as a release (SL/S) memory barrier and as a compiler barrier.
 */
static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after);

/** If sk_atomic_dec does not act as an acquire (L/SL) barrier,
 *  this must act as an acquire (L/SL) memory barrier and as a compiler barrier.
 */
static void sk_membar_acquire__after_atomic_dec();

/** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier,
 *  this must act as an acquire (L/SL) memory barrier and as a compiler barrier.
 */
static void sk_membar_acquire__after_atomic_conditional_inc();

#include SK_ATOMICS_PLATFORM_H
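/*  Illustrative sketch only, not part of this header: a platform header named by
    SK_ATOMICS_PLATFORM_H could satisfy the declarations above with the GCC/Clang
    __sync builtins, roughly as below. Since those builtins act as full barriers,
    the two sk_membar_* hooks can be no-ops in such a port.

        static inline int32_t sk_atomic_inc(int32_t* addr) {
            return __sync_fetch_and_add(addr, 1);
        }

        static inline int32_t sk_atomic_add(int32_t* addr, int32_t inc) {
            return __sync_fetch_and_add(addr, inc);
        }

        static inline int32_t sk_atomic_dec(int32_t* addr) {
            return __sync_fetch_and_add(addr, -1);
        }

        static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) {
            return __sync_bool_compare_and_swap(addr, before, after);
        }

        // The __sync builtins above are full barriers, so nothing extra is needed here.
        static inline void sk_membar_acquire__after_atomic_dec() {}
        static inline void sk_membar_acquire__after_atomic_conditional_inc() {}
*/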

/** Atomically adds one to the int referenced by addr iff the referenced int was not 0
 *  and returns the previous value.
 *  No additional memory barrier is required; this must act as a compiler barrier.
 */
static inline int32_t sk_atomic_conditional_inc(int32_t* addr) {
    int32_t prev;
    do {
        prev = *addr;
        if (0 == prev) {
            break;
        }
    } while (!sk_atomic_cas(addr, prev, prev + 1));
    return prev;
}
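/*  Illustrative only, not part of this header: the primitives above are typically
    paired with the sk_membar_* hooks as in the reference-count sketch below. The
    class and field names (SkExampleRefCnt, SkExampleWeakRefCnt, fRefCnt) are
    hypothetical.

        void SkExampleRefCnt::unref() {
            if (1 == sk_atomic_dec(&fRefCnt)) {
                // sk_atomic_dec is a release barrier, ordering earlier writes;
                // the acquire barrier below orders the destructor's accesses.
                sk_membar_acquire__after_atomic_dec();
                delete this;
            }
        }

        bool SkExampleWeakRefCnt::try_ref() {
            // Only take a reference if the object is still alive (count != 0).
            if (sk_atomic_conditional_inc(&fRefCnt) != 0) {
                sk_membar_acquire__after_atomic_conditional_inc();
                return true;
            }
            return false;
        }
*/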

/** SK_MUTEX_PLATFORM_H must provide the following (or equivalent) declarations.

    class SkBaseMutex {
    public:
        void acquire();
        void release();
    };

    class SkMutex : SkBaseMutex {
    public:
        SkMutex();
        ~SkMutex();
    };

    #define SK_DECLARE_STATIC_MUTEX(name) static SkBaseMutex name = ...
    #define SK_DECLARE_GLOBAL_MUTEX(name) SkBaseMutex name = ...
*/

#include SK_MUTEX_PLATFORM_H
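/*  Illustrative sketch only, not part of this header: on a POSIX platform the
    header named by SK_MUTEX_PLATFORM_H could satisfy the contract above with
    pthreads, roughly as below. The exact struct layout is an assumption, not
    the actual platform implementation.

        #include <pthread.h>

        struct SkBaseMutex {
            void acquire() { pthread_mutex_lock(&fMutex); }
            void release() { pthread_mutex_unlock(&fMutex); }
            pthread_mutex_t fMutex;
        };

        class SkMutex : public SkBaseMutex {
        public:
            SkMutex()  { pthread_mutex_init(&fMutex, NULL); }
            ~SkMutex() { pthread_mutex_destroy(&fMutex); }
        };

        // A static/global SkBaseMutex is a plain aggregate, so it can be
        // initialized without running any code at startup.
        #define SK_DECLARE_STATIC_MUTEX(name) \
            static SkBaseMutex name = { PTHREAD_MUTEX_INITIALIZER }
        #define SK_DECLARE_GLOBAL_MUTEX(name) \
            SkBaseMutex name = { PTHREAD_MUTEX_INITIALIZER }
*/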


class SkAutoMutexAcquire : SkNoncopyable {
public:
    /** Acquires the given mutex; it is released when this object is destroyed,
        unless release() has been called first.
    */
    explicit SkAutoMutexAcquire(SkBaseMutex& mutex) : fMutex(&mutex) {
        SkASSERT(fMutex != NULL);
        mutex.acquire();
    }

    /** Acquires the mutex if it is non-NULL; a NULL mutex is allowed and ignored. */
    explicit SkAutoMutexAcquire(SkBaseMutex* mutex) : fMutex(mutex) {
        if (mutex) {
            mutex->acquire();
        }
    }

    /** If the mutex has not been released, release it now. */
    ~SkAutoMutexAcquire() {
        if (fMutex) {
            fMutex->release();
        }
    }

    /** If the mutex has not been released, release it now. */
    void release() {
        if (fMutex) {
            fMutex->release();
            fMutex = NULL;
        }
    }

private:
    SkBaseMutex* fMutex;
};
// Using SkAutoMutexAcquire(...) as a bare expression (without naming a local
// variable) would acquire and immediately release the mutex. This macro turns
// that mistake into a compile error.
#define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire)
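/*  Illustrative only, not part of this header: typical scoped-locking usage of
    the guard with a file-scope mutex. The names gExampleMutex,
    gExampleSharedCounter, and bump_shared_counter are hypothetical.

        SK_DECLARE_STATIC_MUTEX(gExampleMutex);
        static int gExampleSharedCounter;

        static void bump_shared_counter() {
            SkAutoMutexAcquire ama(gExampleMutex);  // locks here
            gExampleSharedCounter += 1;
        }                                           // unlocks when ama goes out of scope
*/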

#endif