#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H
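
/*
 * Kernel-side futex helpers for ARM: the atomic user-space
 * read-modify-write and compare-and-exchange primitives used by the
 * futex() system call.
 */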

#ifdef __KERNEL__

#if defined(CONFIG_CPU_USE_DOMAINS) && defined(CONFIG_SMP)
/* ARM doesn't provide unprivileged exclusive memory accessors */
#include <asm-generic/futex.h>
#else

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
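
/*
 * Common exception-table entries for the inline assembly below: a fault on
 * either the load at label 1 or the store at label 2 jumps to the fixup at
 * label 4, which writes -EFAULT (passed in err_reg) into the result
 * register and resumes execution at label 3.
 */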
#define __futex_atomic_ex_table(err_reg) \
	"3:\n" \
	" .pushsection __ex_table,\"a\"\n" \
	" .align 3\n" \
	" .long 1b, 4f, 2b, 4f\n" \
	" .popsection\n" \
	" .pushsection .fixup,\"ax\"\n" \
	" .align 2\n" \
	"4: mov %0, " err_reg "\n" \
	" b 3b\n" \
	" .popsection"
#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
	smp_mb(); \
	__asm__ __volatile__( \
	"1: ldrex %1, [%3]\n" \
	" " insn "\n" \
	"2: strex %2, %0, [%3]\n" \
	" teq %2, #0\n" \
	" bne 1b\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: ldrex %1, [%4]\n"
	" teq %1, %2\n"
	" ite eq @ explicit IT needed for the 2b label\n"
	"2: strexeq %0, %3, [%4]\n"
	" movne %0, #0\n"
	" teq %0, #0\n"
	" bne 1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>
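
/*
 * On UP it is enough to disable preemption around a plain load/store pair:
 * nothing else can touch the word in between.  TUSER() (from <asm/domain.h>)
 * emits the unprivileged ldrt/strt forms when CONFIG_CPU_USE_DOMAINS is
 * enabled, so the access is still performed with user permissions.
 */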
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
	__asm__ __volatile__( \
	"1: " TUSER(ldr) " %1, [%3]\n" \
	" " insn "\n" \
	"2: " TUSER(str) " %0, [%3]\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: " TUSER(ldr) " %1, [%4]\n"
	" teq %1, %2\n"
	" it eq @ explicit IT needed for the 2b label\n"
	"2: " TUSER(streq) " %3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}

#endif /* !SMP */
static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tmp;

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}

	return ret;
}

#endif /* !(CPU_USE_DOMAINS && SMP) */
#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */