Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit c4559f67 authored by Maciej W. Rozycki, committed by Ralf Baechle
Browse files

Always use ".set mips3" rather than selecting between ".set mips2" and
".set mips3" for assembling ll/sc sequences, to avoid problems with
64-bit configurations.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 69c75fb4
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -42,7 +42,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2		# __sem_update_count	\n"
		"	sra	%1, %0, 31				\n"
		"	not	%1					\n"
@@ -55,7 +55,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)
		: "r" (incr), "m" (sem->count));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2		# __sem_update_count	\n"
		"	sra	%1, %0, 31				\n"
		"	not	%1					\n"
+10 −10
Original line number Diff line number Diff line
@@ -62,7 +62,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
@@ -74,7 +74,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
@@ -104,7 +104,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
@@ -116,7 +116,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
@@ -144,7 +144,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
@@ -159,7 +159,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
@@ -191,7 +191,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
@@ -206,7 +206,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
@@ -245,7 +245,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
@@ -261,7 +261,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
+12 −14
Original line number Diff line number Diff line
@@ -20,14 +20,12 @@
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __SET_MIPS	".set	mips2	"
#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __SET_MIPS	".set	mips3	"
#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
#endif

@@ -74,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
@@ -84,7 +82,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
@@ -138,7 +136,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
@@ -148,7 +146,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
@@ -201,7 +199,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
		unsigned long temp;

		__asm__ __volatile__(
		"	" __SET_MIPS "				\n"
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
@@ -214,7 +212,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
		unsigned long temp;

		__asm__ __volatile__(
		"	" __SET_MIPS "				\n"
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
@@ -267,7 +265,7 @@ static inline int test_and_set_bit(unsigned long nr,
		unsigned long temp, res;

		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
@@ -289,7 +287,7 @@ static inline int test_and_set_bit(unsigned long nr,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
@@ -361,7 +359,7 @@ static inline int test_and_clear_bit(unsigned long nr,
		unsigned long temp, res;

		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
@@ -384,7 +382,7 @@ static inline int test_and_clear_bit(unsigned long nr,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
@@ -457,7 +455,7 @@ static inline int test_and_change_bit(unsigned long nr,
		unsigned long temp, res;

		__asm__ __volatile__(
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
@@ -479,7 +477,7 @@ static inline int test_and_change_bit(unsigned long nr,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	" __SET_MIPS "					\n"
		"	.set	mips3					\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"\t%2, %1				\n"
+5 −5
Original line number Diff line number Diff line
@@ -176,7 +176,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %3			# xchg_u32	\n"
		"	move	%2, %z4					\n"
		"	sc	%2, %1					\n"
@@ -193,7 +193,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %3			# xchg_u32	\n"
		"	move	%2, %z4					\n"
		"	sc	%2, %1					\n"
@@ -301,7 +301,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
		"	bne	%0, %z3, 2f				\n"
		"	move	$1, %z4					\n"
@@ -320,7 +320,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
		"	bne	%0, %z3, 2f				\n"
		"	move	$1, %z4					\n"
@@ -376,7 +376,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	mips2					\n"
		"	.set	mips3					\n"
		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
		"	bne	%0, %z3, 2f				\n"
		"	move	$1, %z4					\n"