Index: fusl/arch/powerpc/atomic_arch.h
diff --git a/fusl/arch/powerpc/atomic_arch.h b/fusl/arch/powerpc/atomic_arch.h
index f31566b20956f7751db74e24250a33b566341928..d6f4db92663a9b7000560ce5517ee900e9570a11 100644
--- a/fusl/arch/powerpc/atomic_arch.h
+++ b/fusl/arch/powerpc/atomic_arch.h
@@ -1,39 +1,35 @@
 #define a_ll a_ll
-static inline int a_ll(volatile int *p)
-{
-	int v;
-	__asm__ __volatile__ ("lwarx %0, 0, %2" : "=r"(v) : "m"(*p), "r"(p));
-	return v;
+static inline int a_ll(volatile int* p) {
+  int v;
+  __asm__ __volatile__("lwarx %0, 0, %2" : "=r"(v) : "m"(*p), "r"(p));
+  return v;
 }
 
 #define a_sc a_sc
-static inline int a_sc(volatile int *p, int v)
-{
-	int r;
-	__asm__ __volatile__ (
-		"stwcx. %2, 0, %3 ; mfcr %0"
-		: "=r"(r), "=m"(*p) : "r"(v), "r"(p) : "memory", "cc");
-	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
+static inline int a_sc(volatile int* p, int v) {
+  int r;
+  __asm__ __volatile__("stwcx. %2, 0, %3 ; mfcr %0"
+                       : "=r"(r), "=m"(*p)
+                       : "r"(v), "r"(p)
+                       : "memory", "cc");
+  return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
 }
 
 #define a_barrier a_barrier
-static inline void a_barrier()
-{
-	__asm__ __volatile__ ("sync" : : : "memory");
+static inline void a_barrier() {
+  __asm__ __volatile__("sync" : : : "memory");
 }
 
 #define a_pre_llsc a_barrier
 
 #define a_post_llsc a_post_llsc
-static inline void a_post_llsc()
-{
-	__asm__ __volatile__ ("isync" : : : "memory");
+static inline void a_post_llsc() {
+  __asm__ __volatile__("isync" : : : "memory");
 }
 
 #define a_store a_store
-static inline void a_store(volatile int *p, int v)
-{
-	a_pre_llsc();
-	*p = v;
-	a_post_llsc();
+static inline void a_store(volatile int* p, int v) {
+  a_pre_llsc();
+  *p = v;
+  a_post_llsc();
 }
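
Note (reviewer context, not part of the patch): the ll/sc primitives reformatted here are what musl's generic atomic.h (src/internal/atomic.h upstream) composes into a compare-and-swap when the arch does not define a_cas itself. A minimal sketch of that composition follows, written in the clang-format style this patch adopts; it assumes the a_ll, a_sc, a_pre_llsc, and a_post_llsc definitions above are in scope, and it is illustrative only, not code added by this change.

static inline int a_cas(volatile int* p, int t, int s) {
  int old;
  a_pre_llsc(); /* "sync": order earlier accesses before the reservation loop */
  do
    old = a_ll(p);                 /* lwarx: load current value, take reservation */
  while (old == t && !a_sc(p, s)); /* retry only if the value matched but stwcx. failed;
                                      a mismatch exits immediately with CAS failure */
  a_post_llsc(); /* "isync": keep later accesses from moving ahead of the loop */
  return old;    /* equals t on success, the conflicting value otherwise */
}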