Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit a4c1887d authored by Will Deacon, committed by Ingo Molnar
Browse files

locking/arch: Remove dummy arch_{read,spin,write}_lock_flags() implementations



The arch_{read,spin,write}_lock_flags() macros are simply mapped to the
non-flags versions by the majority of architectures, so do this in core
code and remove the dummy implementations. Also remove the implementation
in spinlock_up.h, since all callers of do_raw_spin_lock_flags() call
local_irq_save(flags) anyway.

Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: paulmck@linux.vnet.ibm.com
Link: http://lkml.kernel.org/r/1507055129-12300-4-git-send-email-will.deacon@arm.com


Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 0160fb17
Loading
Loading
Loading
Loading
+0 −4
Original line number Original line Diff line number Diff line
@@ -13,7 +13,6 @@
 * We make no fairness assumptions. They have a cost.
 * We make no fairness assumptions. They have a cost.
 */
 */


#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
#define arch_spin_is_locked(x)	((x)->lock != 0)
#define arch_spin_is_locked(x)	((x)->lock != 0)


static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
@@ -160,7 +159,4 @@ static inline void arch_write_unlock(arch_rwlock_t * lock)
	lock->lock = 0;
	lock->lock = 0;
}
}


#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#endif /* _ALPHA_SPINLOCK_H */
#endif /* _ALPHA_SPINLOCK_H */
+0 −4
Original line number Original line Diff line number Diff line
@@ -14,7 +14,6 @@
#include <asm/barrier.h>
#include <asm/barrier.h>


#define arch_spin_is_locked(x)	((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)
#define arch_spin_is_locked(x)	((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)
#define arch_spin_lock_flags(lock, flags)	arch_spin_lock(lock)


#ifdef CONFIG_ARC_HAS_LLSC
#ifdef CONFIG_ARC_HAS_LLSC


@@ -410,7 +409,4 @@ static inline void arch_write_unlock(arch_rwlock_t *rw)


#endif
#endif


#define arch_read_lock_flags(lock, flags)	arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags)	arch_write_lock(lock)

#endif /* __ASM_SPINLOCK_H */
#endif /* __ASM_SPINLOCK_H */
+0 −5
Original line number Original line Diff line number Diff line
@@ -52,8 +52,6 @@ static inline void dsb_sev(void)
 * memory.
 * memory.
 */
 */


#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline void arch_spin_lock(arch_spinlock_t *lock)
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
{
	unsigned long tmp;
	unsigned long tmp;
@@ -270,7 +268,4 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
	}
	}
}
}


#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#endif /* __ASM_SPINLOCK_H */
#endif /* __ASM_SPINLOCK_H */
+0 −5
Original line number Original line Diff line number Diff line
@@ -27,8 +27,6 @@
 * instructions.
 * instructions.
 */
 */


#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline void arch_spin_lock(arch_spinlock_t *lock)
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
{
	unsigned int tmp;
	unsigned int tmp;
@@ -303,9 +301,6 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
/* read_can_lock - would read_trylock() succeed? */
/* read_can_lock - would read_trylock() succeed? */
#define arch_read_can_lock(x)		((x)->lock < 0x80000000)
#define arch_read_can_lock(x)		((x)->lock < 0x80000000)


#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

/* See include/linux/spinlock.h */
/* See include/linux/spinlock.h */
#define smp_mb__after_spinlock()	smp_mb()
#define smp_mb__after_spinlock()	smp_mb()


+0 −6
Original line number Original line Diff line number Diff line
@@ -36,8 +36,6 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
	__raw_spin_lock_asm(&lock->lock);
	__raw_spin_lock_asm(&lock->lock);
}
}


#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
{
	return __raw_spin_trylock_asm(&lock->lock);
	return __raw_spin_trylock_asm(&lock->lock);
@@ -53,8 +51,6 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
	__raw_read_lock_asm(&rw->lock);
	__raw_read_lock_asm(&rw->lock);
}
}


#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)

static inline int arch_read_trylock(arch_rwlock_t *rw)
static inline int arch_read_trylock(arch_rwlock_t *rw)
{
{
	return __raw_read_trylock_asm(&rw->lock);
	return __raw_read_trylock_asm(&rw->lock);
@@ -70,8 +66,6 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
	__raw_write_lock_asm(&rw->lock);
	__raw_write_lock_asm(&rw->lock);
}
}


#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

static inline int arch_write_trylock(arch_rwlock_t *rw)
static inline int arch_write_trylock(arch_rwlock_t *rw)
{
{
	return __raw_write_trylock_asm(&rw->lock);
	return __raw_write_trylock_asm(&rw->lock);
Loading