s390/cmpxchg,percpu: implement cmpxchg_double()
author: Heiko Carstens <heiko.carstens@de.ibm.com>
Mon, 17 Sep 2012 05:37:13 +0000 (07:37 +0200)
committer: Martin Schwidefsky <schwidefsky@de.ibm.com>
Wed, 26 Sep 2012 13:45:25 +0000 (15:45 +0200)
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
arch/s390/Kconfig
arch/s390/include/asm/cmpxchg.h
arch/s390/include/asm/percpu.h

index 3f42161d6bc6c29a57a1c10c22fb0439679f75f8..e601340f277a45cae7083155acacc7dcaac1b790 100644 (file)
@@ -96,6 +96,7 @@ config S390
        select HAVE_MEMBLOCK
        select HAVE_MEMBLOCK_NODE_MAP
        select HAVE_CMPXCHG_LOCAL
+       select HAVE_CMPXCHG_DOUBLE
        select ARCH_DISCARD_MEMBLOCK
        select BUILDTIME_EXTABLE_SORT
        select ARCH_INLINE_SPIN_TRYLOCK
index 8d798e962b632c9a8aa426576077a48d1f6f0a38..0f636cbdf3420973f169a591d03614b5f173ca11 100644 (file)
@@ -7,7 +7,9 @@
 #ifndef __ASM_CMPXCHG_H
 #define __ASM_CMPXCHG_H
 
+#include <linux/mmdebug.h>
 #include <linux/types.h>
+#include <linux/bug.h>
 
 extern void __xchg_called_with_bad_pointer(void);
 
@@ -203,6 +205,65 @@ static inline unsigned long long __cmpxchg64(void *ptr,
 })
 #endif /* CONFIG_64BIT */
 
+/*
+ * __cmpxchg_double_op() - common worker for the double compare-and-swap.
+ *
+ * Pins the old pair into registers 2/3 and the new pair into registers 4/5;
+ * CDS/CDSG require their operands in even/odd register pairs, which is why
+ * explicit register asm variables are used instead of letting the compiler
+ * allocate. The condition code is extracted via "ipm"/"srl 28" into cc.
+ * Evaluates to non-zero (true) when the swap was performed, i.e. when the
+ * instruction set condition code 0. "insn" is the instruction mnemonic
+ * ("cds" or "cdsg") pasted into the asm template by the wrappers below.
+ * NOTE(review): *(p1)/*(p2) are listed only as inputs; the stores on a
+ * successful swap are covered by the "memory" clobber.
+ */
+#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)              \
+({                                                                     \
+       register __typeof__(*(p1)) __old1 asm("2") = (o1);              \
+       register __typeof__(*(p2)) __old2 asm("3") = (o2);              \
+       register __typeof__(*(p1)) __new1 asm("4") = (n1);              \
+       register __typeof__(*(p2)) __new2 asm("5") = (n2);              \
+       int cc;                                                         \
+       asm volatile(                                                   \
+                       insn   " %[old],%[new],%[ptr]\n"                \
+               "       ipm     %[cc]\n"                                \
+               "       srl     %[cc],28"                               \
+               : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)    \
+               : [new] "d" (__new1), "d" (__new2),                     \
+                 [ptr] "Q" (*(p1)), "Q" (*(p2))                        \
+               : "memory", "cc");                                      \
+       !cc;                                                            \
+})
+
+/* 4-byte pair variant: CDS compares/swaps two adjacent 32-bit words. */
+#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
+       __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")
+
+/* 8-byte pair variant: CDSG compares/swaps two adjacent 64-bit words. */
+#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
+       __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")
+
+/*
+ * Deliberately never defined: referencing it from the default case below
+ * turns an unsupported operand size into a link-time error.
+ */
+extern void __cmpxchg_double_called_with_bad_pointer(void);
+
+/*
+ * __cmpxchg_double() - size-dispatching double compare-and-swap.
+ *
+ * Selects the 4- or 8-byte variant from sizeof(*(p1)). Returns non-zero
+ * on a successful swap. Any other size fails to link via the extern above.
+ * NOTE(review): __ret is left uninitialized on the default path; that path
+ * can never survive linking, so this is the usual kernel idiom, but some
+ * compilers may warn.
+ */
+#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)                       \
+({                                                                     \
+       int __ret;                                                      \
+       switch (sizeof(*(p1))) {                                        \
+       case 4:                                                         \
+               __ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);     \
+               break;                                                  \
+       case 8:                                                         \
+               __ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);     \
+               break;                                                  \
+       default:                                                        \
+               __cmpxchg_double_called_with_bad_pointer();             \
+       }                                                               \
+       __ret;                                                          \
+})
+
+/*
+ * cmpxchg_double() - public double compare-and-swap on two adjacent longs.
+ *
+ * Both operands must be exactly long-sized (BUILD_BUG_ON) and p2 must
+ * immediately follow p1 in memory (VM_BUG_ON checks (p1 + 1) == p2) —
+ * CDS/CDSG operate on one doubly-wide location, not two arbitrary words.
+ * Dispatch is on sizeof(long), so 31-bit kernels use the 4-byte variant
+ * and 64-bit kernels the 8-byte one. Returns non-zero on success.
+ */
+#define cmpxchg_double(p1, p2, o1, o2, n1, n2)                         \
+({                                                                     \
+       __typeof__(p1) __p1 = (p1);                                     \
+       __typeof__(p2) __p2 = (p2);                                     \
+       int __ret;                                                      \
+       BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));                    \
+       BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));                    \
+       VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
+       if (sizeof(long) == 4)                                          \
+               __ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \
+       else                                                            \
+               __ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \
+       __ret;                                                          \
+})
+
+/* CDS/CDSG are architected on all supported machines, so always available. */
+#define system_has_cmpxchg_double()    1
+
 #include <asm-generic/cmpxchg-local.h>
 
 static inline unsigned long __cmpxchg_local(void *ptr,
index 964e7ee872f706606e4d5dfb765c13e294ce5a43..86fe0ee2cee5945beacca37f563d6d118fae0a12 100644 (file)
@@ -67,7 +67,7 @@
 #define this_cpu_xor_4(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
 #define this_cpu_xor_8(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
 
-#define arch_this_cpu_cmpxchg(pcp, oval, nval)                 \
+#define arch_this_cpu_cmpxchg(pcp, oval, nval)                         \
 ({                                                                     \
        typedef typeof(pcp) pcp_op_T__;                                 \
        pcp_op_T__ ret__;                                               \
 #define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)
 #endif
 
+/*
+ * arch_this_cpu_cmpxchg_double() - double cmpxchg on this CPU's per-cpu pair.
+ *
+ * Arguments are evaluated once into typed locals, preemption is disabled
+ * so the per-cpu pointers stay valid for this CPU, and the actual swap is
+ * delegated to __cmpxchg_double(). Evaluates to non-zero on success.
+ * NOTE(review): pcp1/pcp2 adjacency is enforced only when the caller goes
+ * through cmpxchg_double()'s VM_BUG_ON; here it is assumed — the percpu
+ * core lays out double-cmpxchg pairs adjacently, but confirm at call sites.
+ */
+#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)       \
+({                                                                     \
+       typeof(pcp1) o1__ = (o1), n1__ = (n1);                          \
+       typeof(pcp2) o2__ = (o2), n2__ = (n2);                          \
+       typeof(pcp1) *p1__;                                             \
+       typeof(pcp2) *p2__;                                             \
+       int ret__;                                                      \
+       preempt_disable();                                              \
+       p1__ = __this_cpu_ptr(&(pcp1));                                 \
+       p2__ = __this_cpu_ptr(&(pcp2));                                 \
+       ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);   \
+       preempt_enable();                                               \
+       ret__;                                                          \
+})
+
+/*
+ * Hook the arch implementation into the generic this_cpu_* framework;
+ * the 8-byte form only exists on 64-bit builds (CDSG needs 64-bit regs).
+ */
+#define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double
+#ifdef CONFIG_64BIT
+#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double
+#endif
+
 #include <asm-generic/percpu.h>
 
 #endif /* __ARCH_S390_PERCPU__ */