atomic.h: add atomic64 cmpxchg, xchg and add_unless to powerpc
Author:     Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
AuthorDate: Tue, 8 May 2007 07:34:27 +0000 (00:34 -0700)
Commit:     Linus Torvalds <torvalds@woody.linux-foundation.org>
CommitDate: Tue, 8 May 2007 18:15:19 +0000 (11:15 -0700)
[akpm@linux-foundation.org: build fixes]
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
include/asm-powerpc/atomic.h
include/asm-powerpc/bitops.h
include/asm-powerpc/system.h

diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index 2ce4b6b7b34887456961fdc9227a184bcec535b8..438a7fcfba58dfd0f0a95137b952d0c85c45e6b5 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -165,8 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
        return t;
 }
 
-#define atomic_cmpxchg(v, o, n) \
-       ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
@@ -414,8 +413,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
        return t;
 }
 
-#define atomic64_cmpxchg(v, o, n) \
-       ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
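For illustration only (not part of this patch): atomic_cmpxchg() returns the value the counter held before the attempt, and callers detect success by comparing that result against the expected old value. The add_unless primitive named in the commit title is conventionally built as a cmpxchg retry loop along the following lines; sketch_add_unless is an invented name, and the body mirrors the usual kernel pattern rather than a literal hunk from this commit:

    /* Add 'a' to v unless v currently holds 'u'; returns non-zero
     * if the add was performed. */
    static inline int sketch_add_unless(atomic_t *v, int a, int u)
    {
            int c, old;

            c = atomic_read(v);
            for (;;) {
                    if (unlikely(c == u))
                            break;          /* excluded value seen */
                    old = atomic_cmpxchg(v, c, c + a);
                    if (likely(old == c))
                            break;          /* swap succeeded */
                    c = old;                /* lost a race; retry */
            }
            return c != u;
    }
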
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 8f757f6246e4effe36e790c811dd0b7939f4d875..8144a2788db67bc101e47b20545d8fecf03ded35 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -39,7 +39,6 @@
 #ifdef __KERNEL__
 
 #include <linux/compiler.h>
-#include <asm/atomic.h>
 #include <asm/asm-compat.h>
 #include <asm/synch.h>
 
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index d3e0906ff2bc055bf1df7b566ff293e2faf778c6..77bf5873a013734ba22a46945241a5032455148e 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -7,7 +7,6 @@
 #include <linux/kernel.h>
 
 #include <asm/hw_irq.h>
-#include <asm/atomic.h>
 
 /*
  * Memory barrier.
@@ -227,6 +226,29 @@ __xchg_u32(volatile void *p, unsigned long val)
        return prev;
 }
 
+/*
+ * Atomic exchange, without SMP memory barriers
+ *
+ * Changes the memory location '*ptr' to be val and returns the
+ * previous value; atomic only with respect to the local CPU.
+ */
+static __inline__ unsigned long
+__xchg_u32_local(volatile void *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    lwarx   %0,0,%2 \n"
+       PPC405_ERR77(0,%2)
+"      stwcx.  %3,0,%2 \n\
+       bne-    1b"
+       : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+       : "r" (p), "r" (val)
+       : "cc", "memory");
+
+       return prev;
+}
+
 #ifdef CONFIG_PPC64
 static __inline__ unsigned long
 __xchg_u64(volatile void *p, unsigned long val)
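For illustration (not from the patch): lwarx/stwcx. form a load-reserve/store-conditional pair, and the bne- 1b retries until the conditional store succeeds. In C-like pseudocode, with load_reserve() and store_conditional() as hypothetical stand-ins for the two instructions, the loop above behaves as:

    unsigned long prev;

    do {
            prev = load_reserve(p);               /* lwarx  */
    } while (!store_conditional(p, val));         /* stwcx. fails if the
                                                     reservation was lost */
    return prev;

The only difference from __xchg_u32() is that the _local variant omits the SMP barrier macros that __xchg_u32() places before and after the loop, so it is atomic with respect to interrupts on the same CPU but unordered against other CPUs.
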
@@ -246,6 +268,23 @@ __xchg_u64(volatile void *p, unsigned long val)
 
        return prev;
 }
+
+static __inline__ unsigned long
+__xchg_u64_local(volatile void *p, unsigned long val)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__(
+"1:    ldarx   %0,0,%2 \n"
+       PPC405_ERR77(0,%2)
+"      stdcx.  %3,0,%2 \n\
+       bne-    1b"
+       : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+       : "r" (p), "r" (val)
+       : "cc", "memory");
+
+       return prev;
+}
 #endif
 
 /*
@@ -269,12 +308,33 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
        return x;
 }
 
+static __inline__ unsigned long
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+{
+       switch (size) {
+       case 4:
+               return __xchg_u32_local(ptr, x);
+#ifdef CONFIG_PPC64
+       case 8:
+               return __xchg_u64_local(ptr, x);
+#endif
+       }
+       __xchg_called_with_bad_pointer();
+       return x;
+}
 #define xchg(ptr,x)                                                         \
   ({                                                                        \
      __typeof__(*(ptr)) _x_ = (x);                                          \
      (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
   })
 
+#define xchg_local(ptr,x)                                                   \
+  ({                                                                        \
+     __typeof__(*(ptr)) _x_ = (x);                                          \
+     (__typeof__(*(ptr))) __xchg_local((ptr),                               \
+               (unsigned long)_x_, sizeof(*(ptr)));                         \
+  })
+
 #define tas(ptr) (xchg((ptr),1))
 
 /*
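A hypothetical use (all names invented for illustration): xchg_local() suits data that only its owning CPU writes, such as draining a per-CPU event counter, where the full barriers of xchg() would be wasted:

    static DEFINE_PER_CPU(unsigned int, pending_events);

    /* Atomically read-and-zero this CPU's counter; safe against
     * interrupts on the same CPU without any SMP barrier. */
    static unsigned int drain_pending(void)
    {
            return xchg_local(&__get_cpu_var(pending_events), 0);
    }
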
@@ -306,6 +366,28 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
        return prev;
 }
 
+static __inline__ unsigned long
+__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
+                       unsigned long new)
+{
+       unsigned int prev;
+
+       __asm__ __volatile__ (
+"1:    lwarx   %0,0,%2         # __cmpxchg_u32\n\
+       cmpw    0,%0,%3\n\
+       bne-    2f\n"
+       PPC405_ERR77(0,%2)
+"      stwcx.  %4,0,%2\n\
+       bne-    1b"
+       "\n\
+2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
+
 #ifdef CONFIG_PPC64
 static __inline__ unsigned long
 __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
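For illustration: like cmpxchg(), the _local variants return the value the location held before the attempt rather than a success flag, so callers compare the result against the expected old value. A minimal invented example:

    /* Returns non-zero if *flag was atomically flipped from 0 to 1
     * (atomic with respect to this CPU only). */
    static int try_claim(volatile unsigned int *flag)
    {
            return __cmpxchg_u32_local(flag, 0, 1) == 0;
    }
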
@@ -328,6 +410,27 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 
        return prev;
 }
+
+static __inline__ unsigned long
+__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
+                       unsigned long new)
+{
+       unsigned long prev;
+
+       __asm__ __volatile__ (
+"1:    ldarx   %0,0,%2         # __cmpxchg_u64\n\
+       cmpd    0,%0,%3\n\
+       bne-    2f\n\
+       stdcx.  %4,0,%2\n\
+       bne-    1b"
+       "\n\
+2:"
+       : "=&r" (prev), "+m" (*p)
+       : "r" (p), "r" (old), "r" (new)
+       : "cc", "memory");
+
+       return prev;
+}
 #endif
 
 /* This function doesn't exist, so you'll get a linker error
@@ -350,6 +453,22 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
        return old;
 }
 
+static __inline__ unsigned long
+__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
+         unsigned int size)
+{
+       switch (size) {
+       case 4:
+               return __cmpxchg_u32_local(ptr, old, new);
+#ifdef CONFIG_PPC64
+       case 8:
+               return __cmpxchg_u64_local(ptr, old, new);
+#endif
+       }
+       __cmpxchg_called_with_bad_pointer();
+       return old;
+}
+
 #define cmpxchg(ptr,o,n)                                                \
   ({                                                                    \
      __typeof__(*(ptr)) _o_ = (o);                                      \
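For illustration: __cmpxchg_called_with_bad_pointer() is declared but deliberately never defined. When size is a compile-time constant 4 (or 8 on ppc64), the switch folds away and the call is eliminated as dead code; any other size leaves a live call to a nonexistent symbol, so a mis-sized cmpxchg_local() fails at link time instead of miscompiling silently:

    char c = 0;

    /* sizeof(c) == 1 matches no case, so the call survives into the
     * object file and the final link fails with an undefined
     * reference to __cmpxchg_called_with_bad_pointer. */
    (void)cmpxchg_local(&c, 0, 1);
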
@@ -358,6 +477,15 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
                                    (unsigned long)_n_, sizeof(*(ptr))); \
   })
 
+
+#define cmpxchg_local(ptr,o,n)                                          \
+  ({                                                                    \
+     __typeof__(*(ptr)) _o_ = (o);                                      \
+     __typeof__(*(ptr)) _n_ = (n);                                      \
+     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,    \
+                                   (unsigned long)_n_, sizeof(*(ptr))); \
+  })
+
 #ifdef CONFIG_PPC64
 /*
  * We handle most unaligned accesses in hardware. On the other hand
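
A closing usage sketch (all names invented): this barrier-free cmpxchg_local() is the kind of primitive that local_t-style per-CPU counters build on, letting a CPU update its own data atomically with respect to its interrupt handlers without paying SMP ordering costs:

    /* 'counter' must be CPU-local: per-CPU data, or otherwise never
     * written concurrently from another CPU. */
    static void local_add_sketch(unsigned long *counter, unsigned long n)
    {
            unsigned long old, new;

            do {
                    old = *counter;
                    new = old + n;
                    /* retry if an interrupt on this CPU changed the
                     * counter between the load and the cmpxchg */
            } while (cmpxchg_local(counter, old, new) != old);
    }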