#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>

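/* Count trailing zero bits of a 64-bit value using two 32-bit bsf
 * instructions; like bsf itself, the result is undefined when x is 0. */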
static inline int a_ctz_64(uint64_t x)
{
	int r;
	__asm__( "bsf %1,%0 ; jnz 1f ; bsf %2,%0 ; addl $32,%0\n1:"
		: "=&r"(r) : "r"((unsigned)x), "r"((unsigned)(x>>32)) );
	return r;
}

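/* Count trailing zero bits of an unsigned long (undefined for x == 0). */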
static inline int a_ctz_l(unsigned long x)
{
	long r;
	__asm__( "bsf %1,%0" : "=r"(r) : "r"(x) );
	return r;
}

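/* 64-bit atomic AND, performed as two independent locked 32-bit ANDs;
 * each half is atomic on its own, but the pair is not one atomic operation. */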
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; andl %1, (%0) ; lock ; andl %2, 4(%0)"
		: : "r"((long *)p), "r"((unsigned)v), "r"((unsigned)(v>>32)) : "memory" );
}

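/* 64-bit atomic OR, likewise split into two locked 32-bit ORs. */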
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; orl %1, (%0) ; lock ; orl %2, 4(%0)"
		: : "r"((long *)p), "r"((unsigned)v), "r"((unsigned)(v>>32)) : "memory" );
}

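/* Atomic OR of v into the long at p. */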
static inline void a_or_l(volatile void *p, long v)
{
	__asm__( "lock ; orl %1, %0"
		: "=m"(*(long *)p) : "r"(v) : "memory" );
}

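/* Atomic compare-and-swap on a pointer: if *p equals t, store s;
 * returns the previous value of *p. */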
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(long *)p) : "a"(t), "r"(s) : "memory" );
	return t;
}

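/* Atomic compare-and-swap on an int: if *p equals t, store s;
 * returns the previous value of *p. */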
static inline int a_cas(volatile int *p, int t, int s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" );
	return t;
}

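/* Atomic OR of v into *p. */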
static inline void a_or(volatile int *p, int v)
{
	__asm__( "lock ; orl %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}

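/* Atomic AND of v into *p. */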
static inline void a_and(volatile int *p, int v)
{
	__asm__( "lock ; andl %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}

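/* Atomically exchange *x with v and return the previous value;
 * xchg with a memory operand is implicitly locked. */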
static inline int a_swap(volatile int *x, int v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}

#define a_xchg a_swap

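/* Atomically add v to *x and return the previous value (lock xadd). */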
static inline int a_fetch_add(volatile int *x, int v)
{
	__asm__( "lock ; xadd %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}

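/* Atomic increment of *x. */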
static inline void a_inc(volatile int *x)
{
	__asm__( "lock ; incl %0" : "=m"(*x) : "m"(*x) : "memory" );
}

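/* Atomic decrement of *x. */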
static inline void a_dec(volatile int *x)
{
	__asm__( "lock ; decl %0" : "=m"(*x) : "m"(*x) : "memory" );
}

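/* Store x to *p, then execute a locked no-op on the stack, which acts
 * as a full memory barrier. */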
static inline void a_store(volatile int *p, int x)
{
	__asm__( "movl %1, %0 ; lock ; orl $0,(%%esp)" : "=m"(*p) : "r"(x) : "memory" );
}

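/* CPU hint for spin-wait loops. */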
static inline void a_spin()
{
	__asm__ __volatile__( "pause" : : : "memory" );
}

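/* Compiler-only barrier; no fence instruction is emitted. */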
static inline void a_barrier()
{
	__asm__ __volatile__( "" : : : "memory" );
}

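/* Deliberate crash: hlt is privileged, so executing it in user mode faults. */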
static inline void a_crash()
{
	__asm__ __volatile__( "hlt" : : : "memory" );
}

#endif