include/asm-x86/cmpxchg_64.h: checkpatch cleanups - formatting only
author Joe Perches <joe@perches.com>
Sun, 23 Mar 2008 08:01:52 +0000 (01:01 -0700)
committer Ingo Molnar <mingo@elte.hu>
Thu, 17 Apr 2008 15:41:22 +0000 (17:41 +0200)
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
include/asm-x86/cmpxchg_64.h

index 56f5b41e071c49b2c2f2f50a481858411a4a224e..d9b26b9a28cf9f0e3f64f4206f1922b331408f54 100644 (file)
@@ -3,7 +3,8 @@
 
 #include <asm/alternative.h> /* Provides LOCK_PREFIX */
 
-#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
+#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
+                                                (ptr), sizeof(*(ptr))))
 
 #define __xg(x) ((volatile long *)(x))
 
@@ -19,33 +20,34 @@ static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
  * Note 2: xchg has side effect, so that attribute volatile is necessary,
  *       but generally the primitive is invalid, *ptr is output argument. --ANK
  */
-static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
+                                  int size)
 {
        switch (size) {
-               case 1:
-                       __asm__ __volatile__("xchgb %b0,%1"
-                               :"=q" (x)
-                               :"m" (*__xg(ptr)), "0" (x)
-                               :"memory");
-                       break;
-               case 2:
-                       __asm__ __volatile__("xchgw %w0,%1"
-                               :"=r" (x)
-                               :"m" (*__xg(ptr)), "0" (x)
-                               :"memory");
-                       break;
-               case 4:
-                       __asm__ __volatile__("xchgl %k0,%1"
-                               :"=r" (x)
-                               :"m" (*__xg(ptr)), "0" (x)
-                               :"memory");
-                       break;
-               case 8:
-                       __asm__ __volatile__("xchgq %0,%1"
-                               :"=r" (x)
-                               :"m" (*__xg(ptr)), "0" (x)
-                               :"memory");
-                       break;
+       case 1:
+               asm volatile("xchgb %b0,%1"
+                            : "=q" (x)
+                            : "m" (*__xg(ptr)), "0" (x)
+                            : "memory");
+               break;
+       case 2:
+               asm volatile("xchgw %w0,%1"
+                            : "=r" (x)
+                            : "m" (*__xg(ptr)), "0" (x)
+                            : "memory");
+               break;
+       case 4:
+               asm volatile("xchgl %k0,%1"
+                            : "=r" (x)
+                            : "m" (*__xg(ptr)), "0" (x)
+                            : "memory");
+               break;
+       case 8:
+               asm volatile("xchgq %0,%1"
+                            : "=r" (x)
+                            : "m" (*__xg(ptr)), "0" (x)
+                            : "memory");
+               break;
        }
        return x;
 }
@@ -64,61 +66,62 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
        unsigned long prev;
        switch (size) {
        case 1:
-               __asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
-                                    : "=a"(prev)
-                                    : "q"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
+                            : "=a"(prev)
+                            : "q"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 2:
-               __asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 4:
-               __asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 8:
-               __asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        }
        return old;
 }
 
 static inline unsigned long __cmpxchg_local(volatile void *ptr,
-                       unsigned long old, unsigned long new, int size)
+                                           unsigned long old,
+                                           unsigned long new, int size)
 {
        unsigned long prev;
        switch (size) {
        case 1:
-               __asm__ __volatile__("cmpxchgb %b1,%2"
-                                    : "=a"(prev)
-                                    : "q"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile("cmpxchgb %b1,%2"
+                            : "=a"(prev)
+                            : "q"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 2:
-               __asm__ __volatile__("cmpxchgw %w1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile("cmpxchgw %w1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 4:
-               __asm__ __volatile__("cmpxchgl %k1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile("cmpxchgl %k1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        case 8:
-               __asm__ __volatile__("cmpxchgq %1,%2"
-                                    : "=a"(prev)
-                                    : "r"(new), "m"(*__xg(ptr)), "0"(old)
-                                    : "memory");
+               asm volatile("cmpxchgq %1,%2"
+                            : "=a"(prev)
+                            : "r"(new), "m"(*__xg(ptr)), "0"(old)
+                            : "memory");
                return prev;
        }
        return old;
@@ -126,19 +129,20 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 
 #define cmpxchg(ptr, o, n)                                             \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),       \
-                                       (unsigned long)(n), sizeof(*(ptr))))
+                                      (unsigned long)(n), sizeof(*(ptr))))
 #define cmpxchg64(ptr, o, n)                                           \
-  ({                                                                   \
+({                                                                     \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
-  })
+})
 #define cmpxchg_local(ptr, o, n)                                       \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
-                                       (unsigned long)(n), sizeof(*(ptr))))
+                                            (unsigned long)(n),        \
+                                            sizeof(*(ptr))))
 #define cmpxchg64_local(ptr, o, n)                                     \
-  ({                                                                   \
+({                                                                     \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
-  })
+})
 
 #endif
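
For reference, a minimal usage sketch of the xchg() and cmpxchg() interfaces
this header defines; the 'counter' variable and counter_bump() helper are
hypothetical illustrations, not part of the patch:

/*
 * Hypothetical caller (assumes this header is included): atomically
 * drain a counter, then attempt a compare-and-swap update. Both macros
 * expand to the inline asm primitives reformatted above; with an
 * unsigned long operand they select the 8-byte (case 8) variants.
 */
static unsigned long counter;

static void counter_bump(void)
{
	unsigned long old, prev;

	/* Atomically store 0 into counter and fetch the previous value. */
	old = xchg(&counter, 0UL);

	/*
	 * Install old + 1 only if counter still holds 0. cmpxchg()
	 * returns the value it actually found, so the swap succeeded
	 * iff prev == 0.
	 */
	prev = cmpxchg(&counter, 0UL, old + 1);
	if (prev != 0)
		return;	/* lost the race to another CPU */
}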