OLD | NEW |
#define a_cas a_cas
/* Atomic compare-and-swap on an int: if *p == t, store s into *p.
 * Returns the value *p held before the operation (equal to t on success). */
static inline int a_cas(volatile int* p, int t, int s) {
  /* cmpxchg compares %eax (t) with *p; on match it writes s into *p,
   * otherwise it loads the current *p into %eax. The lock prefix makes
   * the read-modify-write atomic. */
  __asm__ __volatile__("lock ; cmpxchg %3, %1" : "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory");
  return t;
}
9 | 9 |
#define a_cas_p a_cas_p
/* Atomic compare-and-swap on a pointer-sized slot: if *p == t, store s.
 * Returns the previous contents of *p (equal to t on success).
 *
 * Fix: the asm is now __volatile__, matching every other atomic op here.
 * A non-volatile asm with identical operands may be CSE'd or moved by the
 * compiler, which is unsound for an operation whose memory operand races
 * with other threads. */
static inline void* a_cas_p(volatile void* p, void* t, void* s) {
  __asm__ __volatile__("lock ; cmpxchg %3, %1"
                       : "=a"(t), "=m"(*(void* volatile*)p)
                       : "a"(t), "r"(s)
                       : "memory");
  return t;
}
18 | 18 |
#define a_swap a_swap
/* Atomically exchange *p with v; returns the old value of *p.
 * xchg with a memory operand is implicitly locked on x86, so no lock
 * prefix is needed. */
static inline int a_swap(volatile int* p, int v) {
  __asm__ __volatile__("xchg %0, %1"
                       : "=r"(v), "=m"(*p)
                       : "0"(v)
                       : "memory");
  return v;
}
27 | 24 |
#define a_fetch_add a_fetch_add
/* Atomically add v to *p; returns the value *p held before the add. */
static inline int a_fetch_add(volatile int* p, int v) {
  /* lock xadd swaps v with *p and then stores the sum, leaving the old
   * value of *p in v's register. */
  __asm__ __volatile__("lock ; xadd %0, %1" : "=r"(v), "=m"(*p) : "0"(v) : "memory");
  return v;
}
36 | 33 |
#define a_and a_and
/* Atomically apply *p &= v. No return value. */
static inline void a_and(volatile int* p, int v) {
  __asm__ __volatile__("lock ; and %1, %0"
                       : "=m"(*p)
                       : "r"(v)
                       : "memory");
}
44 | 38 |
#define a_or a_or
/* Atomically apply *p |= v. No return value. */
static inline void a_or(volatile int* p, int v) {
  __asm__ __volatile__("lock ; or %1, %0"
                       : "=m"(*p)
                       : "r"(v)
                       : "memory");
}
52 | 43 |
#define a_and_64 a_and_64
/* Atomically apply *p &= v on a 64-bit word.
 * Fix: spell the qualifier __volatile__ (was __volatile) to match every
 * other asm statement in this file. GCC/Clang accept both spellings, so
 * behavior is unchanged, but the odd one out was clearly accidental. */
static inline void a_and_64(volatile uint64_t* p, uint64_t v) {
  __asm__ __volatile__("lock ; and %1, %0"
                       : "=m"(*p)
                       : "r"(v)
                       : "memory");
}
60 | 48 |
#define a_or_64 a_or_64
/* Atomically apply *p |= v on a 64-bit word. */
static inline void a_or_64(volatile uint64_t* p, uint64_t v) {
  __asm__ __volatile__("lock ; or %1, %0"
                       : "=m"(*p)
                       : "r"(v)
                       : "memory");
}
68 | 53 |
#define a_inc a_inc
/* Atomically increment *p by one. */
static inline void a_inc(volatile int* p) {
  __asm__ __volatile__("lock ; incl %0"
                       : "=m"(*p)
                       : "m"(*p) /* input tie tells the compiler *p is also read */
                       : "memory");
}
76 | 58 |
#define a_dec a_dec
/* Atomically decrement *p by one. */
static inline void a_dec(volatile int* p) {
  __asm__ __volatile__("lock ; decl %0"
                       : "=m"(*p)
                       : "m"(*p) /* input tie tells the compiler *p is also read */
                       : "memory");
}
84 | 63 |
#define a_store a_store
/* Atomically store x to *p with a full fence after the store: the plain
 * mov publishes the value, and the locked "orl $0" on the stack slot
 * provides the StoreLoad ordering a plain x86 store lacks (a common
 * cheaper alternative to mfence). */
static inline void a_store(volatile int* p, int x) {
  __asm__ __volatile__("mov %1, %0 ; lock ; orl $0,(%%rsp)"
                       : "=m"(*p)
                       : "r"(x)
                       : "memory");
}
92 | 71 |
#define a_barrier a_barrier
/* Compiler-only barrier: the empty asm with a "memory" clobber prevents
 * the compiler from reordering or caching memory accesses across this
 * point. It emits no instruction, so it is not a hardware fence.
 * Fix: declare the parameter list as (void) — an empty () is an
 * old-style unprototyped declaration in C11 and earlier. */
static inline void a_barrier(void) {
  __asm__ __volatile__("" : : : "memory");
}
98 | 76 |
/* Fix: the function below is a_spin, but the guard macro only defined
 * a_pause, so a generic "#ifndef a_spin" consumer would still consider
 * a_spin undefined and could emit a conflicting fallback definition.
 * Define a_spin as the guard; a_pause is kept as well in case other
 * code tests it — TODO confirm against the consuming header. */
#define a_pause a_pause
#define a_spin a_spin
/* Busy-wait hint for spin loops: pause reduces power use and pipeline
 * penalties while spinning; also (void) replaces the old-style (). */
static inline void a_spin(void) {
  __asm__ __volatile__("pause" : : : "memory");
}
104 | 81 |
#define a_crash a_crash
/* Deliberately terminate the process: hlt is a privileged instruction,
 * so executing it in user mode faults and the process is killed at the
 * point of the call.
 * Fix: declare the parameter list as (void) instead of the old-style (). */
static inline void a_crash(void) {
  __asm__ __volatile__("hlt" : : : "memory");
}
110 | 86 |
#define a_ctz_64 a_ctz_64
/* Count trailing zero bits of x using bsf.
 * NOTE: bsf leaves its destination undefined when the source is zero,
 * so callers must ensure x != 0.
 * The asm is deliberately non-volatile: it is a pure computation, so the
 * compiler is free to CSE it or drop it when the result is unused. */
static inline int a_ctz_64(uint64_t x) {
  __asm__("bsf %1,%0" : "=r"(x) : "r"(x));
  return (int)x;
}
OLD | NEW |