
Commit e779b2f9 authored by Akinobu Mita, committed by Linus Torvalds

[PATCH] bitops: powerpc: use generic bitops



- remove __{,test_and_}{set,clear,change}_bit() and test_bit()
- remove generic_fls64()
- remove generic_hweight{64,32,16,8}() (popcount technique sketched after this list)
- remove sched_find_first_bit()
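
For reference, the generic hweight helpers that replace the per-arch defines count set bits with a branch-free divide-and-conquer popcount. A minimal standalone C sketch of that technique (illustrative names such as hweight32_sketch are ours, not the kernel's):

#include <stdio.h>

/* Sum adjacent bit pairs, then nibbles, then bytes, then halfwords.
 * This is the divide-and-conquer popcount the generic hweight32
 * helper is based on (a sketch, not the verbatim kernel source). */
static unsigned int hweight32_sketch(unsigned int w)
{
	unsigned int res = (w & 0x55555555) + ((w >> 1) & 0x55555555);
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
	res = (res & 0x0f0f0f0f) + ((res >> 4) & 0x0f0f0f0f);
	res = (res & 0x00ff00ff) + ((res >> 8) & 0x00ff00ff);
	return (res & 0x0000ffff) + ((res >> 16) & 0x0000ffff);
}

int main(void)
{
	printf("%u\n", hweight32_sketch(0xf0f0f0f0u));	/* prints 16 */
	return 0;
}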

Signed-off-by: Akinobu Mita <mita@miraclelinux.com>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent 59e18a2e
arch/powerpc/Kconfig +4 −0
@@ -37,6 +37,10 @@ config RWSEM_XCHGADD_ALGORITHM
 	bool
 	default y
 
+config GENERIC_HWEIGHT
+	bool
+	default y
+
 config GENERIC_CALIBRATE_DELAY
 	bool
 	default y
include/asm-powerpc/bitops.h +4 −101
@@ -184,72 +184,7 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
 	: "cc");
 }
 
-/* Non-atomic versions */
-static __inline__ int test_bit(unsigned long nr,
-			       __const__ volatile unsigned long *addr)
-{
-	return 1UL & (addr[BITOP_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
-}
-
-static __inline__ void __set_bit(unsigned long nr,
-				 volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-	*p  |= mask;
-}
-
-static __inline__ void __clear_bit(unsigned long nr,
-				   volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-	*p &= ~mask;
-}
-
-static __inline__ void __change_bit(unsigned long nr,
-				    volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-	*p ^= mask;
-}
-
-static __inline__ int __test_and_set_bit(unsigned long nr,
-					 volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-	unsigned long old = *p;
-
-	*p = old | mask;
-	return (old & mask) != 0;
-}
-
-static __inline__ int __test_and_clear_bit(unsigned long nr,
-					   volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-	unsigned long old = *p;
-
-	*p = old & ~mask;
-	return (old & mask) != 0;
-}
-
-static __inline__ int __test_and_change_bit(unsigned long nr,
-					    volatile unsigned long *addr)
-{
-	unsigned long mask = BITOP_MASK(nr);
-	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-	unsigned long old = *p;
-
-	*p = old ^ mask;
-	return (old & mask) != 0;
-}
-
+#include <asm-generic/bitops/non-atomic.h>
 
 /*
  * Return the zero-based bit position (LE, not IBM bit numbering) of
@@ -310,16 +245,9 @@ static __inline__ int fls(unsigned int x)
 	asm ("cntlzw %0,%1" : "=r" (lz) : "r" (x));
 	return 32 - lz;
 }
-#define fls64(x)   generic_fls64(x)
+#include <asm-generic/bitops/fls64.h>
 
-/*
- * hweightN: returns the hamming weight (i.e. the number
- * of bits set) of a N-bit word
- */
-#define hweight64(x) generic_hweight64(x)
-#define hweight32(x) generic_hweight32(x)
-#define hweight16(x) generic_hweight16(x)
-#define hweight8(x) generic_hweight8(x)
+#include <asm-generic/bitops/hweight.h>
 
 #define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
 unsigned long find_next_zero_bit(const unsigned long *addr,
@@ -397,32 +325,7 @@ unsigned long find_next_zero_le_bit(const unsigned long *addr,
 #define minix_find_first_zero_bit(addr,size) \
 	find_first_zero_le_bit((unsigned long *)addr, size)
 
-/*
- * Every architecture must define this function. It's the fastest
- * way of searching a 140-bit bitmap where the first 100 bits are
- * unlikely to be set. It's guaranteed that at least one of the 140
- * bits is cleared.
- */
-static inline int sched_find_first_bit(const unsigned long *b)
-{
-#ifdef CONFIG_PPC64
-	if (unlikely(b[0]))
-		return __ffs(b[0]);
-	if (unlikely(b[1]))
-		return __ffs(b[1]) + 64;
-	return __ffs(b[2]) + 128;
-#else
-	if (unlikely(b[0]))
-		return __ffs(b[0]);
-	if (unlikely(b[1]))
-		return __ffs(b[1]) + 32;
-	if (unlikely(b[2]))
-		return __ffs(b[2]) + 64;
-	if (b[3])
-		return __ffs(b[3]) + 96;
-	return __ffs(b[4]) + 128;
-#endif
-}
+#include <asm-generic/bitops/sched.h>
 
 #endif /* __KERNEL__ */
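
For comparison, the asm-generic/bitops/non-atomic.h helpers pulled in above implement the same word-indexed read-modify-write pattern as the functions deleted from this header. A standalone C sketch of that pattern, with hypothetical names (WORD_IDX, set_bit_sketch) so it compiles outside the kernel:

#include <stdio.h>

#define LONG_BITS (8 * sizeof(unsigned long))
/* Split a bit number into a word index and an in-word mask, the
 * same job BITOP_WORD()/BITOP_MASK() did in the removed code. */
#define WORD_IDX(nr) ((nr) / LONG_BITS)
#define BIT_MASK_OF(nr) (1UL << ((nr) % LONG_BITS))

/* Non-atomic set: a plain read-modify-write, safe only when no
 * other CPU or interrupt handler can touch the same word. */
static void set_bit_sketch(unsigned long nr, unsigned long *addr)
{
	addr[WORD_IDX(nr)] |= BIT_MASK_OF(nr);
}

static int test_bit_sketch(unsigned long nr, const unsigned long *addr)
{
	return 1UL & (addr[WORD_IDX(nr)] >> (nr % LONG_BITS));
}

int main(void)
{
	unsigned long map[3] = { 0 };	/* 96- or 192-bit bitmap */
	set_bit_sketch(70, map);
	printf("%d %d\n", test_bit_sketch(70, map),
	       test_bit_sketch(71, map));	/* prints "1 0" */
	return 0;
}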