Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e01961ce authored by Ralf Baechle
Browse files

MIPS: Remove further use of .subsection



Commit 7837314d ["MIPS: Get rid of branches to .subsections"] removed most
uses of .subsection in inline assembler code.

It left the instances in spinlock.h alone because we knew their use was
in fairly small files where .subsection use was fine but of course this
was a fragile assumption.  LTO breaks this assumption resulting in build
errors due to exceeded branch range, so remove further instances of
.subsection.

The two functions that still use .subsection don't currently cause issues
however this use is still fragile.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 9fcb7059
Loading
Loading
Loading
Loading
+44 −72
Original line number Diff line number Diff line
@@ -242,25 +242,16 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
		: "m" (rw->lock)
		: "memory");
	} else {
		do {
			__asm__ __volatile__(
		"	.set	noreorder	# arch_read_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bltz	%1, 3f					\n"
			"1:	ll	%1, %2	# arch_read_lock	\n"
			"	bltz	%1, 1b				\n"
			"	 addu	%1, 1				\n"
			"2:	sc	%1, %0				\n"
		"	beqz	%1, 1b					\n"
		"	 nop						\n"
		"	.subsection 2					\n"
		"3:	ll	%1, %2					\n"
		"	bltz	%1, 3b					\n"
		"	 addu	%1, 1					\n"
		"	b	2b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
			: "=m" (rw->lock), "=&r" (tmp)
			: "m" (rw->lock)
			: "memory");
		} while (unlikely(!tmp));
	}

	smp_llsc_mb();
@@ -285,21 +276,15 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
		: "m" (rw->lock)
		: "memory");
	} else {
		do {
			__asm__ __volatile__(
		"	.set	noreorder	# arch_read_unlock	\n"
		"1:	ll	%1, %2					\n"
			"1:	ll	%1, %2	# arch_read_unlock	\n"
			"	sub	%1, 1				\n"
			"	sc	%1, %0				\n"
		"	beqz	%1, 2f					\n"
		"	 nop						\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
			: "=m" (rw->lock), "=&r" (tmp)
			: "m" (rw->lock)
			: "memory");
		} while (unlikely(!tmp));
	}
}

@@ -321,25 +306,16 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
		: "m" (rw->lock)
		: "memory");
	} else {
		do {
			__asm__ __volatile__(
		"	.set	noreorder	# arch_write_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bnez	%1, 3f					\n"
			"1:	ll	%1, %2	# arch_write_lock	\n"
			"	bnez	%1, 1b				\n"
			"	 lui	%1, 0x8000			\n"
			"2:	sc	%1, %0				\n"
		"	beqz	%1, 3f					\n"
		"	 nop						\n"
		"	.subsection 2					\n"
		"3:	ll	%1, %2					\n"
		"	bnez	%1, 3b					\n"
		"	 lui	%1, 0x8000				\n"
		"	b	2b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
			: "=m" (rw->lock), "=&r" (tmp)
			: "m" (rw->lock)
			: "memory");
		} while (unlikely(!tmp));
	}

	smp_llsc_mb();
@@ -424,25 +400,21 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
		: "m" (rw->lock)
		: "memory");
	} else {
		do {
			__asm__ __volatile__(
		"	.set	noreorder	# arch_write_trylock	\n"
			"	ll	%1, %3	# arch_write_trylock	\n"
			"	li	%2, 0				\n"
		"1:	ll	%1, %3					\n"
			"	bnez	%1, 2f				\n"
			"	lui	%1, 0x8000			\n"
			"	sc	%1, %0				\n"
		"	beqz	%1, 3f					\n"
			"	li	%2, 1				\n"
			"2:						\n"
		__WEAK_LLSC_MB
		"	.subsection 2					\n"
		"3:	b	1b					\n"
		"	 li	%2, 0					\n"
		"	.previous					\n"
		"	.set	reorder					\n"
			: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
			: "m" (rw->lock)
			: "memory");
		} while (unlikely(!tmp));

		smp_llsc_mb();
	}

	return ret;