__constant_test_and_set_bit(nr, vaddr) : \
__generic_test_and_set_bit(nr, vaddr))
-extern __inline__ int __constant_test_and_set_bit(int nr,volatile void * vaddr)
+extern __inline__ int __constant_test_and_set_bit(int nr,volatile unsigned long * vaddr)
{
char retval;
return retval;
}
-extern __inline__ int __generic_test_and_set_bit(int nr,volatile void * vaddr)
+extern __inline__ int __generic_test_and_set_bit(int nr,volatile unsigned long * vaddr)
{
char retval;
#define __set_bit(nr,vaddr) set_bit(nr,vaddr)
-extern __inline__ void __constant_set_bit(int nr, volatile void * vaddr)
+extern __inline__ void __constant_set_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bset %1,%0"
: "+m" (((volatile char *)vaddr)[(nr^31) >> 3]) : "di" (nr & 7));
}
-extern __inline__ void __generic_set_bit(int nr, volatile void * vaddr)
+extern __inline__ void __generic_set_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bfset %1@{%0:#1}"
: : "d" (nr^31), "a" (vaddr) : "memory");
#define __test_and_clear_bit(nr,vaddr) test_and_clear_bit(nr,vaddr)
-extern __inline__ int __constant_test_and_clear_bit(int nr, volatile void * vaddr)
+extern __inline__ int __constant_test_and_clear_bit(int nr, volatile unsigned long * vaddr)
{
char retval;
return retval;
}
-extern __inline__ int __generic_test_and_clear_bit(int nr, volatile void * vaddr)
+extern __inline__ int __generic_test_and_clear_bit(int nr, volatile unsigned long * vaddr)
{
char retval;
__generic_clear_bit(nr, vaddr))
#define __clear_bit(nr,vaddr) clear_bit(nr,vaddr)
-extern __inline__ void __constant_clear_bit(int nr, volatile void * vaddr)
+extern __inline__ void __constant_clear_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bclr %1,%0"
: "+m" (((volatile char *)vaddr)[(nr^31) >> 3]) : "di" (nr & 7));
}
-extern __inline__ void __generic_clear_bit(int nr, volatile void * vaddr)
+extern __inline__ void __generic_clear_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bfclr %1@{%0:#1}"
: : "d" (nr^31), "a" (vaddr) : "memory");
#define __test_and_change_bit(nr,vaddr) test_and_change_bit(nr,vaddr)
#define __change_bit(nr,vaddr) change_bit(nr,vaddr)
-extern __inline__ int __constant_test_and_change_bit(int nr, volatile void * vaddr)
+extern __inline__ int __constant_test_and_change_bit(int nr, volatile unsigned long * vaddr)
{
char retval;
return retval;
}
-extern __inline__ int __generic_test_and_change_bit(int nr, volatile void * vaddr)
+extern __inline__ int __generic_test_and_change_bit(int nr, volatile unsigned long * vaddr)
{
char retval;
__constant_change_bit(nr, vaddr) : \
__generic_change_bit(nr, vaddr))
-extern __inline__ void __constant_change_bit(int nr, volatile void * vaddr)
+extern __inline__ void __constant_change_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bchg %1,%0"
: "+m" (((volatile char *)vaddr)[(nr^31) >> 3]) : "di" (nr & 7));
}
-extern __inline__ void __generic_change_bit(int nr, volatile void * vaddr)
+extern __inline__ void __generic_change_bit(int nr, volatile unsigned long * vaddr)
{
__asm__ __volatile__ ("bfchg %1@{%0:#1}"
: : "d" (nr^31), "a" (vaddr) : "memory");
}
-extern __inline__ int test_bit(int nr, const volatile void * vaddr)
+extern __inline__ int test_bit(int nr, const volatile unsigned long * vaddr)
{
- return ((1UL << (nr & 31)) & (((const volatile unsigned int *) vaddr)[nr >> 5])) != 0;
+ return ((1UL << (nr & 31)) & (((const volatile unsigned long *) vaddr)[nr >> 5])) != 0;
}
-extern __inline__ int find_first_zero_bit(void * vaddr, unsigned size)
+extern __inline__ int find_first_zero_bit(unsigned long * vaddr, unsigned size)
{
unsigned long *p = vaddr, *addr = vaddr;
unsigned long allones = ~0UL;
return ((p - addr) << 5) + (res ^ 31);
}
-extern __inline__ int find_next_zero_bit (void *vaddr, int size,
+extern __inline__ int find_next_zero_bit (unsigned long *vaddr, int size,
int offset)
{
unsigned long *addr = vaddr;
* differs in spirit from the above ffz (man ffs).
*/
-extern __inline__ int ffs(int x)
+static inline int ffs(int x)
{
int cnt;
- __asm__ __volatile__("bfffo %1{#0:#0},%0" : "=d" (cnt) : "dm" (x & -x));
+ asm ("bfffo %1{#0:#0},%0" : "=d" (cnt) : "dm" (x & -x));
return 32 - cnt;
}
#define __ffs(x) (ffs(x) - 1)
+/*
+ * fls: find last bit set.
+ */
+
+static inline int fls(int x)
+{
+ int cnt;
+
+	asm ("bfffo %1{#0:#0},%0" : "=d" (cnt) : "dm" (x));
+
+ return 32 - cnt;
+}
/*
* Every architecture must define this function. It's the fastest