/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _PARISC_BITOPS_H
#define _PARISC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>
#include <linux/atomic.h>

/* See http://marc.theaimsgroup.com/?t=108826637900003 for discussion
 * on use of volatile and __*_bit() (set/clear/change):
 * *_bit() want use of volatile.
 * __*_bit() are "relaxed" and don't use spinlock or volatile.
 */

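/*
 * Each locked helper below splits the bit number with the generic
 * BIT_WORD()/BIT_MASK() macros: word index nr / BITS_PER_LONG and
 * mask 1UL << (nr % BITS_PER_LONG).  For example, with 32-bit longs,
 * nr = 37 selects word 1 and mask 1UL << 5.
 */
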
static __inline__ void set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr |= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr &= ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr ^= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (!set)
		*addr = old | mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

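/*
 * Usage sketch (illustrative only, not part of the original header): the
 * return value of test_and_set_bit() is the previous bit state, so exactly
 * one concurrent caller sees 0 and "wins".  The flag word and helper below
 * are hypothetical.
 */
#if 0
static unsigned long example_flags;		/* bit 0: work already queued */

static void example_queue_work_once(void)
{
	if (!test_and_set_bit(0, &example_flags))
		example_do_queue_work();	/* hypothetical helper */
}
#endif
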
static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (set)
		*addr = old & ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long oldbit;
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	oldbit = *addr;
	*addr = oldbit ^ mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return (oldbit & mask) ? 1 : 0;
}

#include <asm-generic/bitops/non-atomic.h>
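
/*
 * The generic header above supplies the "relaxed" __set_bit()/__clear_bit()/
 * __change_bit() and __test_and_*() variants mentioned in the comment at the
 * top of this file.  A minimal sketch of the idea (illustrative only, not the
 * exact generic implementation):
 */
#if 0
static inline void example_relaxed_set_bit(int nr, unsigned long *addr)
{
	/* same word/mask split as set_bit() above, but no spinlock or IRQ save */
	addr[BIT_WORD(nr)] |= BIT_MASK(nr);
}
#endif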

/**
 * __ffs - find first bit in word. returns 0 to "BITS_PER_LONG-1".
 * @word: The word to search
 *
 * __ffs() return is undefined if no bit is set.
 *
 * 32-bit fast __ffs by LaMont Jones "lamont At hp com".
 * 64-bit enhancement by Grant Grundler "grundler At parisc-linux org".
 * (with help from willy/jejb to get the semantics right)
 *
 * This algorithm avoids branches by making use of nullification.
 * One side effect of "extr" instructions is that they set the PSW[N] bit.
 * How PSW[N] (nullify next insn) gets set is determined by the
 * "condition" field (eg "<>" or "TR" below) in the extr* insn.
 * Only the 1st and one of either the 2nd or 3rd insn will get executed.
 * Each set of 3 insn will get executed in 2 cycles on PA8x00 vs 16 or so
 * cycles for each mispredicted branch.
 */

static __inline__ unsigned long __ffs(unsigned long x)
{
	unsigned long ret;

	__asm__(
#ifdef CONFIG_64BIT
		" ldi		63,%1\n"
		" extrd,u,*<>	%0,63,32,%%r0\n"
		" extrd,u,*TR	%0,31,32,%0\n"	/* move top 32-bits down */
		" addi		-32,%1,%1\n"
#else
		" ldi		31,%1\n"
#endif
		" extru,<>	%0,31,16,%%r0\n"
		" extru,TR	%0,15,16,%0\n"	/* xxxx0000 -> 0000xxxx */
		" addi		-16,%1,%1\n"
		" extru,<>	%0,31,8,%%r0\n"
		" extru,TR	%0,23,8,%0\n"	/* 0000xx00 -> 000000xx */
		" addi		-8,%1,%1\n"
		" extru,<>	%0,31,4,%%r0\n"
		" extru,TR	%0,27,4,%0\n"	/* 000000x0 -> 0000000x */
		" addi		-4,%1,%1\n"
		" extru,<>	%0,31,2,%%r0\n"
		" extru,TR	%0,29,2,%0\n"	/* 0000000y, 1100b -> 0011b */
		" addi		-2,%1,%1\n"
		" extru,=	%0,31,1,%%r0\n"	/* check last bit */
		" addi		-1,%1,%1\n"
		: "+r" (x), "=r" (ret) );
	return ret;
}
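
/*
 * Portable sketch of the same narrowing search (illustrative only, not
 * compiled in; the asm above is the real implementation).  Start from the
 * highest possible index and subtract whenever a set bit is found in the
 * low half of the remaining window, otherwise shift the upper half down.
 * Example: __ffs(0x50) == 4.
 */
#if 0
static inline unsigned long __ffs_sketch(unsigned long x)
{
	unsigned long ret = BITS_PER_LONG - 1;

#ifdef CONFIG_64BIT
	if (x & 0xffffffffUL)		/* a set bit in the low 32 bits? */
		ret -= 32;
	else
		x >>= 32;
#endif
	if (x & 0xffff) ret -= 16; else x >>= 16;
	if (x & 0xff)   ret -=  8; else x >>=  8;
	if (x & 0xf)    ret -=  4; else x >>=  4;
	if (x & 0x3)    ret -=  2; else x >>=  2;
	if (x & 0x1)    ret -=  1;
	return ret;
}
#endif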

#include <asm-generic/bitops/ffz.h>

/*
 * ffs: find first bit set. returns 1 to BITS_PER_LONG or 0 (if none set)
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, therefore differs in spirit from the above ffz (man ffs).
 */
static __inline__ int ffs(int x)
{
	return x ? (__ffs((unsigned long)x) + 1) : 0;
}
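
/*
 * Illustrative values for the convention above: ffs(0) == 0, ffs(1) == 1,
 * ffs(0x8000) == 16.
 */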

/*
 * fls: find last (most significant) bit set.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static __inline__ int fls(unsigned int x)
{
	int ret;
	if (!x)
		return 0;

	__asm__(
		" ldi		1,%1\n"
		" extru,<>	%0,15,16,%%r0\n"
		" zdep,TR	%0,15,16,%0\n"	/* xxxx0000 */
		" addi		16,%1,%1\n"
		" extru,<>	%0,7,8,%%r0\n"
		" zdep,TR	%0,23,24,%0\n"	/* xx000000 */
		" addi		8,%1,%1\n"
		" extru,<>	%0,3,4,%%r0\n"
		" zdep,TR	%0,27,28,%0\n"	/* x0000000 */
		" addi		4,%1,%1\n"
		" extru,<>	%0,1,2,%%r0\n"
		" zdep,TR	%0,29,30,%0\n"	/* y0000000 (y&3 = 0) */
		" addi		2,%1,%1\n"
		" extru,=	%0,0,1,%%r0\n"
		" addi		1,%1,%1\n"	/* if y & 8, add 1 */
		: "+r" (x), "=r" (ret) );

	return ret;
}
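
/*
 * Portable sketch of the same idea (illustrative only, not compiled in;
 * the asm above is the real implementation): start at 1 and add whenever
 * a set bit is found in the upper half of the remaining window, otherwise
 * shift the value up.  Example: fls(0x00f00000) == 24.
 */
#if 0
static inline int fls_sketch(unsigned int x)
{
	int ret = 1;

	if (!x)
		return 0;
	if (x & 0xffff0000u) ret += 16; else x <<= 16;
	if (x & 0xff000000u) ret +=  8; else x <<=  8;
	if (x & 0xf0000000u) ret +=  4; else x <<=  4;
	if (x & 0xc0000000u) ret +=  2; else x <<=  2;
	if (x & 0x80000000u) ret +=  1;
	return ret;
}
#endif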

#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _PARISC_BITOPS_H */