#ifndef _PARISC_BITOPS_H
#define _PARISC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/types.h>		/* for BITS_PER_LONG/SHIFT_PER_LONG */
#include <asm/byteorder.h>
#include <asm/barrier.h>
#include <linux/atomic.h>
/*
 * HP-PARISC specific bit operations
 * for a detailed description of the functions please refer
 * to include/asm-i386/bitops.h or kerneldoc
 */
#if __BITS_PER_LONG == 64
#define SHIFT_PER_LONG 6
#else
#define SHIFT_PER_LONG 5
#endif

#define CHOP_SHIFTCOUNT(x) (((unsigned long) (x)) & (BITS_PER_LONG - 1))
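
/*
 * Worked example (illustrative): on a 64-bit kernel SHIFT_PER_LONG is 6,
 * so for nr == 70 the word index is (70 >> 6) == 1 and the bit within
 * that word is CHOP_SHIFTCOUNT(70) == (70 & 63) == 6.  The *_bit()
 * helpers below combine exactly these two steps.
 */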
/* See http://marc.theaimsgroup.com/?t=108826637900003 for discussion
 * on use of volatile and __*_bit() (set/clear/change):
 * *_bit() want use of volatile.
 * __*_bit() are "relaxed" and don't use spinlock or volatile.
 * (An illustrative usage sketch follows the non-atomic.h include below.)
 */
static __inline__ void set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
	unsigned long flags;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr |= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = ~(1UL << CHOP_SHIFTCOUNT(nr));
	unsigned long flags;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr &= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
	unsigned long flags;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr ^= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (!set)
		*addr = old | mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (set)
		*addr = old & ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
	unsigned long oldbit;
	unsigned long flags;

	addr += (nr >> SHIFT_PER_LONG);
	_atomic_spin_lock_irqsave(addr, flags);
	oldbit = *addr;
	*addr = oldbit ^ mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return (oldbit & mask) ? 1 : 0;
}

#include <asm-generic/bitops/non-atomic.h>
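
/*
 * Illustrative usage sketch (not part of the original header): how the
 * atomic *_bit() helpers above and the "relaxed" __*_bit() variants from
 * non-atomic.h are typically chosen.  The function name and the bitmap
 * parameter are hypothetical, introduced here only for demonstration.
 */
static __inline__ void __bitops_usage_sketch(volatile unsigned long *bitmap)
{
	/* Atomic: safe against concurrent updates from other CPUs. */
	set_bit(0, bitmap);

	/* Atomic read-modify-write: returns the previous value of bit 1. */
	if (test_and_set_bit(1, bitmap))
		clear_bit(0, bitmap);	/* bit 1 was already set */

	/* Relaxed: only valid when the caller already serializes access. */
	__set_bit(2, bitmap);
}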

#ifdef __KERNEL__

/**
 * __ffs - find first bit in word. returns 0 to "BITS_PER_LONG-1".
 * @word: The word to search
 *
 * __ffs() return is undefined if no bit is set.
 *
 * 32-bit fast __ffs by LaMont Jones "lamont At hp com".
 * 64-bit enhancement by Grant Grundler "grundler At parisc-linux org".
 * (with help from willy/jejb to get the semantics right)
 *
 * This algorithm avoids branches by making use of nullification.
 * One side effect of "extr" instructions is it sets PSW[N] bit.
 * How PSW[N] (nullify next insn) gets set is determined by the
 * "condition" field (eg "<>" or "TR" below) in the extr* insn.
 * Only the 1st and one of either the 2nd or 3rd insn will get executed.
 * Each set of 3 insns executes in 2 cycles on PA8x00 vs 16 or so
 * cycles for each mispredicted branch.
 * (A portable C sketch of the same halving search follows the function.)
 */
static __inline__ unsigned long __ffs(unsigned long x)
{
	unsigned long ret;

	__asm__(
#ifdef CONFIG_64BIT
		" ldi       63,%1\n"
		" extrd,u,*<>  %0,63,32,%%r0\n"
		" extrd,u,*TR  %0,31,32,%0\n"	/* move top 32-bits down */
		" addi    -32,%1,%1\n"
#else
		" ldi       31,%1\n"
#endif
		" extru,<>  %0,31,16,%%r0\n"
		" extru,TR  %0,15,16,%0\n"	/* xxxx0000 -> 0000xxxx */
		" addi    -16,%1,%1\n"
		" extru,<>  %0,31,8,%%r0\n"
		" extru,TR  %0,23,8,%0\n"	/* 0000xx00 -> 000000xx */
		" addi    -8,%1,%1\n"
		" extru,<>  %0,31,4,%%r0\n"
		" extru,TR  %0,27,4,%0\n"	/* 000000x0 -> 0000000x */
		" addi    -4,%1,%1\n"
		" extru,<>  %0,31,2,%%r0\n"
		" extru,TR  %0,29,2,%0\n"	/* 0000000y, 1100b -> 0011b */
		" addi    -2,%1,%1\n"
		" extru,=  %0,31,1,%%r0\n"	/* check last bit */
		" addi    -1,%1,%1\n"
			: "+r" (x), "=r" (ret) );
	return ret;
}

#include <asm-generic/bitops/ffz.h>

/*
 * ffs: find first bit set. returns 1 to BITS_PER_LONG or 0 (if none set)
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, therefore differs in spirit from the above ffz (man ffs).
 */
static __inline__ int ffs(int x)
{
	return x ? (__ffs((unsigned long)x) + 1) : 0;
}
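
/*
 * Worked examples (illustrative): ffs(0x10) == 5, since bit 4 is the
 * lowest set bit and ffs() counts from 1; ffs(1) == 1; ffs(0) == 0.
 */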

/*
 * fls: find last (most significant) bit set.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static __inline__ int fls(int x)
{
	int ret;
	if (!x)
		return 0;

	__asm__(
	"	ldi		1,%1\n"
	"	extru,<>	%0,15,16,%%r0\n"
	"	zdep,TR		%0,15,16,%0\n"		/* xxxx0000 */
	"	addi		16,%1,%1\n"
	"	extru,<>	%0,7,8,%%r0\n"
	"	zdep,TR		%0,23,24,%0\n"		/* xx000000 */
	"	addi		8,%1,%1\n"
	"	extru,<>	%0,3,4,%%r0\n"
	"	zdep,TR		%0,27,28,%0\n"		/* x0000000 */
	"	addi		4,%1,%1\n"
	"	extru,<>	%0,1,2,%%r0\n"
	"	zdep,TR		%0,29,30,%0\n"		/* y0000000 (y&3 = 0) */
	"	addi		2,%1,%1\n"
	"	extru,=		%0,0,1,%%r0\n"
	"	addi		1,%1,%1\n"		/* if y & 8, add 1 */
		: "+r" (x), "=r" (ret) );

	return ret;
}

#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _PARISC_BITOPS_H */