/* Entry points for the three runtime-selected atomic helpers; hidden
   visibility keeps references to the table binding locally. */
__attribute__((__visibility__(
    "hidden"))) extern const void* __arm_atomics[3]; /* gettp, cas, barrier */

/* ldrex/strex are available on ARMv6 in ARM mode only (v6 Thumb cannot
   encode them) and on all ARMv7-and-later targets. */
#if ((__ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) || \
    __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7

/* Load-linked: load-exclusive of *p, opening an LL/SC sequence. */
#define a_ll a_ll
static inline int a_ll(volatile int* p) {
  int v;
  __asm__ __volatile__("ldrex %0, %1" : "=r"(v) : "Q"(*p));
  return v;
}

/* Store-conditional: store-exclusive of v to *p. strex yields 0 in the
   status register on success, so the result is inverted to make a_sc
   return nonzero on success. */
#define a_sc a_sc
static inline int a_sc(volatile int* p, int v) {
  int r;
  __asm__ __volatile__("strex %0,%2,%1"
                       : "=&r"(r), "=Q"(*p)
                       : "r"(v)
                       : "memory");
  return !r;
}

#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7

/* Full memory barrier via the v7 dmb instruction, restricted to the
   inner-shareable domain, which suffices for inter-thread ordering. */
#define a_barrier a_barrier
static inline void a_barrier() {
  __asm__ __volatile__("dmb ish" : : : "memory");
}

#endif

/* Full barriers before and after each LL/SC sequence make the composed
   read-modify-write operations fully ordered. */
#define a_pre_llsc a_barrier
#define a_post_llsc a_barrier
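
/* Illustrative sketch, not part of this file: musl's generic atomic.h
 * composes a_cas from the primitives defined above in roughly this
 * shape. example_cas is a hypothetical name used only for illustration,
 * and the block is #if 0'd out so it cannot affect compilation. */
#if 0
static inline int example_cas(volatile int* p, int t, int s) {
  int old;
  a_pre_llsc();
  do old = a_ll(p);                /* load-exclusive */
  while (old == t && !a_sc(p, s)); /* retry only on spurious SC failure */
  a_post_llsc();
  return old; /* equals t iff the swap took effect */
}
#endif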

#else

/* Fallback for targets without usable ldrex/strex: compare-and-swap
   through the out-of-line __a_cas helper provided elsewhere in the
   source tree. Convention: r0=expected, r1=desired, r2=pointer; r0 is
   0 on return iff the swap succeeded. */
#define a_cas a_cas
static inline int a_cas(volatile int* p, int t, int s) {
  for (;;) {
    register int r0 __asm__("r0") = t;
    register int r1 __asm__("r1") = s;
    register volatile int* r2 __asm__("r2") = p;
    int old;
    __asm__ __volatile__("bl __a_cas"
                         : "+r"(r0)
                         : "r"(r1), "r"(r2)
                         : "memory", "r3", "lr", "ip", "cc");
    if (!r0)
      return t; /* swap succeeded; the old value was t */
    if ((old = *p) != t)
      return old; /* genuine mismatch */
    /* r0 != 0 but *p reads back as t: treat as spurious failure, retry */
  }
}

#endif

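/* Illustrative sketch, not part of this file: a typical consumer of
 * a_cas, retrying until the swap takes effect. example_fetch_add is a
 * hypothetical name used only for illustration, and the block is
 * #if 0'd out so it cannot affect compilation. */
#if 0
static inline int example_fetch_add(volatile int* p, int v) {
  int old;
  do old = *p;
  while (a_cas(p, old, old + v) != old); /* a_cas returns the old value */
  return old;
}
#endif
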
#ifndef a_barrier
/* Fallback barrier: call the out-of-line __a_barrier helper provided
   elsewhere in the source tree; the clobber list covers the registers
   its calling convention may destroy. */
#define a_barrier a_barrier
static inline void a_barrier() {
  __asm__ __volatile__("bl __a_barrier" : : : "memory", "cc", "ip", "lr");
}
#endif

/* Crash with an instruction encoding that is architecturally undefined
   in the current mode (ARM or Thumb), so execution faults immediately. */
#define a_crash a_crash
static inline void a_crash() {
  __asm__ __volatile__(
#ifndef __thumb__
      ".word 0xe7f000f0"
#else
      ".short 0xdeff"
#endif
      :
      :
      : "memory");
}