OLD | NEW |
1 #define a_cas a_cas | 1 #define a_cas a_cas |
2 static inline int a_cas(volatile int *p, int t, int s) | 2 static inline int a_cas(volatile int* p, int t, int s) { |
3 { | 3 __asm__ __volatile__("lock ; cmpxchg %3, %1" |
4 » __asm__ __volatile__ ( | 4 : "=a"(t), "=m"(*p) |
5 » » "lock ; cmpxchg %3, %1" | 5 : "a"(t), "r"(s) |
6 » » : "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" ); | 6 : "memory"); |
7 » return t; | 7 return t; |
8 } | 8 } |
9 | 9 |
10 #define a_swap a_swap | 10 #define a_swap a_swap |
11 static inline int a_swap(volatile int *p, int v) | 11 static inline int a_swap(volatile int* p, int v) { |
12 { | 12 __asm__ __volatile__("xchg %0, %1" : "=r"(v), "=m"(*p) : "0"(v) : "memory"); |
13 » __asm__ __volatile__( | 13 return v; |
14 » » "xchg %0, %1" | |
15 » » : "=r"(v), "=m"(*p) : "0"(v) : "memory" ); | |
16 » return v; | |
17 } | 14 } |
18 | 15 |
19 #define a_fetch_add a_fetch_add | 16 #define a_fetch_add a_fetch_add |
20 static inline int a_fetch_add(volatile int *p, int v) | 17 static inline int a_fetch_add(volatile int* p, int v) { |
21 { | 18 __asm__ __volatile__("lock ; xadd %0, %1" |
22 » __asm__ __volatile__( | 19 : "=r"(v), "=m"(*p) |
23 » » "lock ; xadd %0, %1" | 20 : "0"(v) |
24 » » : "=r"(v), "=m"(*p) : "0"(v) : "memory" ); | 21 : "memory"); |
25 » return v; | 22 return v; |
26 } | 23 } |
27 | 24 |
28 #define a_and a_and | 25 #define a_and a_and |
29 static inline void a_and(volatile int *p, int v) | 26 static inline void a_and(volatile int* p, int v) { |
30 { | 27 __asm__ __volatile__("lock ; and %1, %0" : "=m"(*p) : "r"(v) : "memory"); |
31 » __asm__ __volatile__( | |
32 » » "lock ; and %1, %0" | |
33 » » : "=m"(*p) : "r"(v) : "memory" ); | |
34 } | 28 } |
35 | 29 |
36 #define a_or a_or | 30 #define a_or a_or |
37 static inline void a_or(volatile int *p, int v) | 31 static inline void a_or(volatile int* p, int v) { |
38 { | 32 __asm__ __volatile__("lock ; or %1, %0" : "=m"(*p) : "r"(v) : "memory"); |
39 » __asm__ __volatile__( | |
40 » » "lock ; or %1, %0" | |
41 » » : "=m"(*p) : "r"(v) : "memory" ); | |
42 } | 33 } |
43 | 34 |
44 #define a_and_64 a_and_64 | 35 #define a_and_64 a_and_64 |
45 static inline void a_and_64(volatile uint64_t *p, uint64_t v) | 36 static inline void a_and_64(volatile uint64_t* p, uint64_t v) { |
46 { | 37 __asm__ __volatile__("lock ; and %1, %0" : "=m"(*p) : "r"(v) : "memory"); |
47 » __asm__ __volatile( | |
48 » » "lock ; and %1, %0" | |
49 » » : "=m"(*p) : "r"(v) : "memory" ); | |
50 } | 38 } |
51 | 39 |
52 #define a_or_64 a_or_64 | 40 #define a_or_64 a_or_64 |
53 static inline void a_or_64(volatile uint64_t *p, uint64_t v) | 41 static inline void a_or_64(volatile uint64_t* p, uint64_t v) { |
54 { | 42 __asm__ __volatile__("lock ; or %1, %0" : "=m"(*p) : "r"(v) : "memory"); |
55 » __asm__ __volatile__( | |
56 » » "lock ; or %1, %0" | |
57 » » : "=m"(*p) : "r"(v) : "memory" ); | |
58 } | 43 } |
59 | 44 |
60 #define a_inc a_inc | 45 #define a_inc a_inc |
61 static inline void a_inc(volatile int *p) | 46 static inline void a_inc(volatile int* p) { |
62 { | 47 __asm__ __volatile__("lock ; incl %0" : "=m"(*p) : "m"(*p) : "memory"); |
63 » __asm__ __volatile__( | |
64 » » "lock ; incl %0" | |
65 » » : "=m"(*p) : "m"(*p) : "memory" ); | |
66 } | 48 } |
67 | 49 |
68 #define a_dec a_dec | 50 #define a_dec a_dec |
69 static inline void a_dec(volatile int *p) | 51 static inline void a_dec(volatile int* p) { |
70 { | 52 __asm__ __volatile__("lock ; decl %0" : "=m"(*p) : "m"(*p) : "memory"); |
71 » __asm__ __volatile__( | |
72 » » "lock ; decl %0" | |
73 » » : "=m"(*p) : "m"(*p) : "memory" ); | |
74 } | 53 } |
75 | 54 |
76 #define a_store a_store | 55 #define a_store a_store |
77 static inline void a_store(volatile int *p, int x) | 56 static inline void a_store(volatile int* p, int x) { |
78 { | 57 __asm__ __volatile__("mov %1, %0 ; lock ; orl $0,(%%rsp)" |
79 » __asm__ __volatile__( | 58 : "=m"(*p) |
80 » » "mov %1, %0 ; lock ; orl $0,(%%rsp)" | 59 : "r"(x) |
81 » » : "=m"(*p) : "r"(x) : "memory" ); | 60 : "memory"); |
82 } | 61 } |
83 | 62 |
84 #define a_barrier a_barrier | 63 #define a_barrier a_barrier |
85 static inline void a_barrier() | 64 static inline void a_barrier() { |
86 { | 65 __asm__ __volatile__("" : : : "memory"); |
87 » __asm__ __volatile__( "" : : : "memory" ); | |
88 } | 66 } |
89 | 67 |
90 #define a_spin a_spin | 68 #define a_spin a_spin |
91 static inline void a_spin() | 69 static inline void a_spin() { |
92 { | 70 __asm__ __volatile__("pause" : : : "memory"); |
93 » __asm__ __volatile__( "pause" : : : "memory" ); | |
94 } | 71 } |
95 | 72 |
96 #define a_crash a_crash | 73 #define a_crash a_crash |
97 static inline void a_crash() | 74 static inline void a_crash() { |
98 { | 75 __asm__ __volatile__("hlt" : : : "memory"); |
99 » __asm__ __volatile__( "hlt" : : : "memory" ); | |
100 } | 76 } |
101 | 77 |
102 #define a_ctz_64 a_ctz_64 | 78 #define a_ctz_64 a_ctz_64 |
103 static inline int a_ctz_64(uint64_t x) | 79 static inline int a_ctz_64(uint64_t x) { |
104 { | 80 __asm__("bsf %1,%0" : "=r"(x) : "r"(x)); |
105 » __asm__( "bsf %1,%0" : "=r"(x) : "r"(x) ); | 81 return x; |
106 » return x; | |
107 } | 82 } |
108 | 83 |
109 #define a_ctz_l a_ctz_l | 84 #define a_ctz_l a_ctz_l |
110 static inline int a_ctz_l(unsigned long x) | 85 static inline int a_ctz_l(unsigned long x) { |
111 { | 86 __asm__("bsf %1,%0" : "=r"(x) : "r"(x)); |
112 » __asm__( "bsf %1,%0" : "=r"(x) : "r"(x) ); | 87 return x; |
113 » return x; | |
114 } | 88 } |
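
A note on the pattern itself: every primitive pairs a #define a_xxx a_xxx with a static inline definition of the same name, presumably so that a generic atomic header can test #ifdef a_xxx and suppress its portable fallback; the macro name and the function name therefore have to match exactly. As a reference for the intended semantics of the first three primitives, here is an equivalent sketch using the GCC/Clang __atomic builtins (illustrative only, not part of the diff; the ref_ names are invented here):

/* Reference sketch, not part of the header: same semantics via builtins. */
static inline int ref_a_cas(volatile int *p, int t, int s)
{
	/* On failure the builtin writes the observed value back into t, so
	 * returning t always yields the old contents of *p, matching what
	 * cmpxchg leaves in eax. */
	__atomic_compare_exchange_n(p, &t, s, 0, __ATOMIC_SEQ_CST,
	                            __ATOMIC_SEQ_CST);
	return t;
}

static inline int ref_a_swap(volatile int *p, int v)
{
	return __atomic_exchange_n(p, v, __ATOMIC_SEQ_CST); /* xchg */
}

static inline int ref_a_fetch_add(volatile int *p, int v)
{
	return __atomic_fetch_add(p, v, __ATOMIC_SEQ_CST); /* lock xadd */
}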
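
A usage sketch shows why the "memory" clobbers and the fence in a_store matter: a minimal test-and-set lock built only on the primitives above (hypothetical demo, not from the header; the spinlock type and function names are invented). Note that a_store follows its plain mov with lock ; orl $0,(%%rsp), a full StoreLoad barrier that a plain x86 store does not provide, and one that is commonly cheaper than mfence.

/* Hypothetical demo built on the primitives above. */
typedef struct { volatile int held; } spinlock;

static inline void spin_lock(spinlock *l)
{
	/* a_cas returns the old value of held: 0 means this thread made
	 * the 0 -> 1 transition and now owns the lock. */
	while (a_cas(&l->held, 0, 1))
		a_spin(); /* pause hint: cheaper busy-waiting */
}

static inline void spin_unlock(spinlock *l)
{
	/* On x86 a plain store already has release ordering; a_store
	 * additionally fences, giving the unlock sequential consistency. */
	a_store(&l->held, 0);
}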
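
Finally, a caveat on the last two helpers: bsf leaves its destination undefined when the source is zero, so a_ctz_64 and a_ctz_l are only meaningful for nonzero inputs, and callers must guarantee that. Where a caller cannot, a guarded wrapper might look like this (the name is invented here; assumes the header above and stdint.h are in scope):

/* Hypothetical wrapper defining ctz(0) as the operand width. */
static inline int ref_ctz_64_checked(uint64_t x)
{
	return x ? a_ctz_64(x) : 64;
}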