#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))

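/*
 * Usage sketch (illustrative only): nr_users and first_user_setup() are
 * hypothetical names for the example and are not defined by this header.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&nr_users);
 *	if (atomic_read(&nr_users) == 1)
 *		first_user_setup();
 */
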
#include <linux/compiler.h>
#include <asm/system.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

/*
 * The read-modify-write sequences below are made atomic with respect to
 * code running on the same CPU by disabling local interrupts around them.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static __inline__ void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = v->counter + i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long temp, flags;

	local_irq_save(flags);
	temp = v->counter - i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_return(v)	atomic_add_return(1, (v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v)	atomic_add(1, (v))
#define atomic_dec(v)	atomic_sub(1, (v))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

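/*
 * Illustrative sketch of the usual atomic_cmpxchg() retry loop: a
 * saturating increment that never wraps past INT_MAX.  The name
 * atomic_inc_saturating() is hypothetical and not provided by this header.
 *
 *	static inline void atomic_inc_saturating(atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = (old == INT_MAX) ? old : old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *	}
 */
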
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

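/*
 * Illustrative sketch: atomic_inc_not_zero() is the usual way to take a
 * reference only while an object is still live.  struct my_obj and
 * my_obj_get() are hypothetical names used for the example only.
 *
 *	static inline int my_obj_get(struct my_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refcnt);
 *	}
 */
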
static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter &= ~mask;
	local_irq_restore(flags);
}

static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter |= mask;
	local_irq_restore(flags);
}

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>

#endif /* __ASM_SH_ATOMIC_H */