OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2006 The Android Open Source Project | 2 * Copyright 2006 The Android Open Source Project |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #ifndef SkThread_DEFINED | 8 #ifndef SkThread_DEFINED |
9 #define SkThread_DEFINED | 9 #define SkThread_DEFINED |
10 | 10 |
11 #include "SkTypes.h" | 11 #include "SkTypes.h" |
12 | 12 |
13 // SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations. | 13 // SK_ATOMICS_PLATFORM_H must provide inline implementations for the following declarations. |
14 | 14 |
15 /** Atomically adds one to the int referenced by addr and returns the previous value. | 15 /** Atomically adds one to the int referenced by addr and returns the previous value. |
16 * No additional memory barrier is required; this must act as a compiler barrier. | 16 * No additional memory barrier is required; this must act as a compiler barrier. |
17 */ | 17 */ |
18 static int32_t sk_atomic_inc(int32_t* addr); | 18 static int32_t sk_atomic_inc(int32_t* addr); |
19 | 19 |
20 /** Atomically adds inc to the int referenced by addr and returns the previous value. | 20 /** Atomically adds inc to the int referenced by addr and returns the previous value. |
21 * No additional memory barrier is required; this must act as a compiler barrier. | 21 * No additional memory barrier is required; this must act as a compiler barrier. |
22 */ | 22 */ |
23 static int32_t sk_atomic_add(int32_t* addr, int32_t inc); | 23 static int32_t sk_atomic_add(int32_t* addr, int32_t inc); |
24 | 24 |
25 /** Atomically subtracts one from the int referenced by addr and returns the previous value. | 25 /** Atomically subtracts one from the int referenced by addr and returns the previous value. |
26 * This must act as a release (SL/S) memory barrier and as a compiler barrier. | 26 * This must act as a release (SL/S) memory barrier and as a compiler barrier. |
27 */ | 27 */ |
28 static int32_t sk_atomic_dec(int32_t* addr); | 28 static int32_t sk_atomic_dec(int32_t* addr); |
29 | 29 |
30 /** Atomically adds one to the int referenced by addr iff the referenced int was not 0 | |
31 * and returns the previous value. | |
32 * No additional memory barrier is required; this must act as a compiler barrier. | |
33 */ | |
34 static int32_t sk_atomic_conditional_inc(int32_t* addr); | |
35 | |
36 /** Atomic compare and set. | 30 /** Atomic compare and set. |
37 * If *addr == before, set *addr to after and return true, otherwise return false. | 31 * If *addr == before, set *addr to after and return true, otherwise return false. |
38 * This must act as a release (SL/S) memory barrier and as a compiler barrier. | 32 * This must act as a release (SL/S) memory barrier and as a compiler barrier. |
39 */ | 33 */ |
40 static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after); | 34 static bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after); |
41 | 35 |
42 /** If sk_atomic_dec does not act as an acquire (L/SL) barrier, | 36 /** If sk_atomic_dec does not act as an acquire (L/SL) barrier, |
43 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. | 37 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. |
44 */ | 38 */ |
45 static void sk_membar_acquire__after_atomic_dec(); | 39 static void sk_membar_acquire__after_atomic_dec(); |
46 | 40 |
47 /** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier, | 41 /** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier, |
48 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. | 42 * this must act as an acquire (L/SL) memory barrier and as a compiler barrier. |
49 */ | 43 */ |
50 static void sk_membar_acquire__after_atomic_conditional_inc(); | 44 static void sk_membar_acquire__after_atomic_conditional_inc(); |
51 | 45 |
52 #include SK_ATOMICS_PLATFORM_H | 46 #include SK_ATOMICS_PLATFORM_H |
53 | 47 |
48 /** Atomically adds one to the int referenced by addr iff the referenced int was not 0 | |
49 * and returns the previous value. | |
50 * No additional memory barrier is required; this must act as a compiler barrier. | |
51 */ | |
52 static int32_t sk_atomic_conditional_inc(int32_t* addr) { | |
53 int32_t prev; | |
54 do { | |
55 prev = *addr; | |
bungeman-skia
2014/05/27 14:20:20
Eck, it irks me that we have to do this load on al
mtklein
2014/05/27 14:27:27
Agreed. sk_atomic_cas is a bit least-common-denom
| |
56 if (0 == prev) { | |
reed1
2014/05/27 14:36:18
Why the test/break? Does this warrent a comment?
mtklein
2014/05/27 14:38:30
Would return 0 be clearer? That's the conditional
reed1
2014/05/27 14:44:19
Ah, no, my bad. I didn't read the bloody name of t
| |
57 break; | |
58 } | |
59 } while (!sk_atomic_cas(addr, prev, prev+1)); | |
60 return prev; | |
61 } | |
62 | |
54 /** SK_MUTEX_PLATFORM_H must provide the following (or equivalent) declarations. | 63 /** SK_MUTEX_PLATFORM_H must provide the following (or equivalent) declarations. |
55 | 64 |
56 class SkBaseMutex { | 65 class SkBaseMutex { |
57 public: | 66 public: |
58 void acquire(); | 67 void acquire(); |
59 void release(); | 68 void release(); |
60 }; | 69 }; |
61 | 70 |
62 class SkMutex : SkBaseMutex { | 71 class SkMutex : SkBaseMutex { |
63 public: | 72 public: |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
99 fMutex = NULL; | 108 fMutex = NULL; |
100 } | 109 } |
101 } | 110 } |
102 | 111 |
103 private: | 112 private: |
104 SkBaseMutex* fMutex; | 113 SkBaseMutex* fMutex; |
105 }; | 114 }; |
106 #define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire) | 115 #define SkAutoMutexAcquire(...) SK_REQUIRE_LOCAL_VAR(SkAutoMutexAcquire) |
107 | 116 |
108 #endif | 117 #endif |
OLD | NEW |