1 /* Atomic operations usable in machine independent code */
2 #ifndef _LINUX_ATOMIC_H
3 #define _LINUX_ATOMIC_H
4 #include <asm/atomic.h>
/*
 * Provide __deprecated wrappers for the new interface, avoid flag day changes.
 * We need the ugly external functions to break header recursion hell.
 */
10 #ifndef smp_mb__before_atomic_inc
11 static inline void __deprecated smp_mb__before_atomic_inc(void)
13 extern void __smp_mb__before_atomic(void);
14 __smp_mb__before_atomic();
18 #ifndef smp_mb__after_atomic_inc
19 static inline void __deprecated smp_mb__after_atomic_inc(void)
21 extern void __smp_mb__after_atomic(void);
22 __smp_mb__after_atomic();
26 #ifndef smp_mb__before_atomic_dec
27 static inline void __deprecated smp_mb__before_atomic_dec(void)
29 extern void __smp_mb__before_atomic(void);
30 __smp_mb__before_atomic();
34 #ifndef smp_mb__after_atomic_dec
35 static inline void __deprecated smp_mb__after_atomic_dec(void)
37 extern void __smp_mb__after_atomic(void);
38 __smp_mb__after_atomic();
43 * atomic_add_unless - add unless the number is already a given value
44 * @v: pointer of type atomic_t
45 * @a: the amount to add to v...
46 * @u: ...unless v is equal to u.
48 * Atomically adds @a to @v, so long as @v was not already @u.
49 * Returns non-zero if @v was not @u, and zero otherwise.
51 static inline int atomic_add_unless(atomic_t *v, int a, int u)
53 return __atomic_add_unless(v, a, u) != u;
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
68 * atomic_inc_not_zero_hint - increment if not null
69 * @v: pointer of type atomic_t
70 * @hint: probable value of the atomic before the increment
72 * This version of atomic_inc_not_zero() gives a hint of probable
73 * value of the atomic. This helps processor to not read the memory
74 * before doing the atomic read/modify/write cycle, lowering
75 * number of bus transactions on some arches.
77 * Returns: 0 if increment was not done, 1 otherwise.
79 #ifndef atomic_inc_not_zero_hint
80 static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
84 /* sanity test, should be removed by compiler if hint is a constant */
86 return atomic_inc_not_zero(v);
89 val = atomic_cmpxchg(v, c, c + 1);
99 #ifndef atomic_inc_unless_negative
100 static inline int atomic_inc_unless_negative(atomic_t *p)
103 for (v = 0; v >= 0; v = v1) {
104 v1 = atomic_cmpxchg(p, v, v + 1);
112 #ifndef atomic_dec_unless_positive
113 static inline int atomic_dec_unless_positive(atomic_t *p)
116 for (v = 0; v <= 0; v = v1) {
117 v1 = atomic_cmpxchg(p, v, v - 1);
126 * atomic_dec_if_positive - decrement by 1 if old value positive
127 * @v: pointer of type atomic_t
129 * The function returns the old value of *v minus 1, even if
130 * the atomic variable, v, was not decremented.
132 #ifndef atomic_dec_if_positive
133 static inline int atomic_dec_if_positive(atomic_t *v)
139 if (unlikely(dec < 0))
141 old = atomic_cmpxchg((v), c, dec);
142 if (likely(old == c))
150 #ifndef CONFIG_ARCH_HAS_ATOMIC_OR
151 static inline void atomic_or(int i, atomic_t *v)
157 old = atomic_read(v);
159 } while (atomic_cmpxchg(v, old, new) != old);
161 #endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */
163 #include <asm-generic/atomic-long.h>
164 #ifdef CONFIG_GENERIC_ATOMIC64
165 #include <asm-generic/atomic64.h>
167 #endif /* _LINUX_ATOMIC_H */