
Commit ff72b7a6 authored by Ralf Baechle

[MIPS] Fix smp barriers in test_and_{change,clear,set}_bit

parent e10e0cc8
+19 −32
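What went wrong: in each of test_and_set_bit(), test_and_clear_bit() and test_and_change_bit(), every branch of the if/else ladder returned its result directly, so the smp_mb() sitting after the ladder was unreachable and callers got no SMP memory barrier. The fix hoists a single res variable to function scope, deletes the per-branch returns, and routes all branches through one common exit that runs smp_mb() before `return res != 0;`. Below is a minimal standalone sketch of the before/after control flow; has_llsc, do_bit_op() and the smp_mb() macro are illustrative stand-ins, not the kernel's implementations.

#include <stdio.h>

#define smp_mb() __sync_synchronize()	/* stand-in for the kernel barrier */

static int has_llsc = 1;		/* stand-in for cpu_has_llsc */

/* Stand-in for the LL/SC or irq-save bit op; sets bit nr, returns old value. */
static unsigned long do_bit_op(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long mask = 1UL << nr;
	unsigned long old = *addr & mask;

	*addr |= mask;
	return old;
}

/* Before the fix: both branches return early, so smp_mb() is dead code. */
static int broken_test_and_set(unsigned long nr, volatile unsigned long *addr)
{
	if (has_llsc) {
		unsigned long res = do_bit_op(nr, addr);
		return res != 0;	/* skips the barrier below */
	} else {
		unsigned long retval = do_bit_op(nr, addr);
		return retval != 0;	/* so does this one */
	}

	smp_mb();			/* unreachable on every path */
}

/* After the fix: branches only compute res; one exit issues the barrier. */
static int fixed_test_and_set(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long res;

	if (has_llsc)
		res = do_bit_op(nr, addr);	/* LL/SC path in the real code */
	else
		res = do_bit_op(nr, addr);	/* irq-save path in the real code */

	smp_mb();			/* now reached by all callers */

	return res != 0;
}

int main(void)
{
	volatile unsigned long word = 0;

	printf("%d\n", fixed_test_and_set(3, &word));	/* 0: bit was clear */
	printf("%d\n", fixed_test_and_set(3, &word));	/* 1: bit was set */
	return 0;
}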
@@ -238,10 +238,11 @@ static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -254,11 +255,9 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -277,25 +276,22 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a |= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
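Why the barrier placement matters: callers rely on the value-returning test_and_* operations acting as full barriers, e.g. when test_and_set_bit() implements a lock bit. A hedged kernel-style sketch of that usage (BIT_LOCK, lock_word and shared_data are illustrative names; real code would use bit_spin_lock()):

/* Illustrative only: a lock bit built on test_and_set_bit()/clear_bit(). */
#define BIT_LOCK	0

static unsigned long lock_word;
static int shared_data;

static void locked_increment(void)
{
	/* Spin until the old bit value is 0, i.e. we took the lock. */
	while (test_and_set_bit(BIT_LOCK, &lock_word))
		;	/* real code would cpu_relax() here */

	/*
	 * Without the smp_mb() on test_and_set_bit()'s return path, a
	 * weakly ordered CPU could perform this access before other
	 * CPUs observe the lock bit set.
	 */
	shared_data++;

	smp_mb__before_clear_bit();	/* release ordering for the store above */
	clear_bit(BIT_LOCK, &lock_word);
}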
@@ -310,6 +306,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -327,12 +324,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 #ifdef CONFIG_CPU_MIPSR2
 	} else if (__builtin_constant_p(nr)) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
@@ -346,12 +341,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "ri" (bit), "m" (*m)
 		: "memory");
-
-		return res;
 #endif
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -371,25 +364,22 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a &= ~mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
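A side note on the CONFIG_CPU_MIPSR2 hunk above: that branch is taken only when GCC can prove the bit number constant, via __builtin_constant_p(nr), and its removed early return was `return res;` rather than `return res != 0;` because that instruction sequence already leaves exactly 0 or 1 in res. Funnelling it through the common `return res != 0;` exit keeps the result identical while finally giving that path the barrier too. A small standalone illustration of the __builtin_constant_p() dispatch idiom (not kernel code):

#include <stdio.h>

/*
 * __builtin_constant_p(x) evaluates to 1 only when GCC can prove x is a
 * compile-time constant, letting a function pick a cheaper code path for
 * constant arguments. The result depends on inlining and optimization.
 */
static inline const char *which_path(unsigned long nr)
{
	if (__builtin_constant_p(nr))
		return "constant path";
	return "runtime path";
}

int main(void)
{
	volatile unsigned long runtime_nr = 5;

	/* With -O2 the first call typically folds to the constant path. */
	printf("%s\n", which_path(7));
	printf("%s\n", which_path(runtime_nr));
	return 0;
}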
@@ -404,10 +394,11 @@ static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -420,11 +411,9 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -443,24 +432,22 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
-		unsigned long mask, retval;
+		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a ^= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 #include <asm-generic/bitops/non-atomic.h>
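One last detail of the interrupt-disable fallback paths: `res = (mask & *a)` now holds the raw masked value (e.g. 0x8 when bit 3 was set) rather than the old pre-normalized 0/1 retval, and the shared `return res != 0;` exit converts it back to a boolean, so the functions' return values are unchanged. A trivial standalone check of that equivalence:

#include <assert.h>

int main(void)
{
	unsigned long a = 0;
	unsigned long mask = 1UL << 3;
	unsigned long res;

	res = (mask & a);		/* bit clear: res == 0 */
	a |= mask;
	assert((res != 0) == 0);

	res = (mask & a);		/* bit set: res == 0x8, not 1 */
	assert((res != 0) == 1);

	return 0;
}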