Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 71a66287 authored by Graf Yang, committed by Mike Frysinger
Browse files

Blackfin: SMP: rename the arch_xxx lock funcs to __raw_xxx



The external functions are named __raw_xxx, not arch_xxx, so rename the
prototypes to match reality.  This fixes some simple build errors in the
bfin_ksyms.c code which exports these helpers to modules.

Signed-off-by: Graf Yang <graf.yang@analog.com>
Signed-off-by: Mike Frysinger <vapier@gentoo.org>
parent 57afb399
Loading
Loading
Loading
Loading
+3 −0
Original line number Original line Diff line number Diff line
@@ -11,6 +11,9 @@


#include <asm/blackfin.h>	/* for SSYNC() */
#include <asm/blackfin.h>	/* for SSYNC() */
#include <asm/sections.h>	/* for _ramend */
#include <asm/sections.h>	/* for _ramend */
#ifdef CONFIG_SMP
#include <asm/smp.h>
#endif


extern void blackfin_icache_flush_range(unsigned long start_address, unsigned long end_address);
extern void blackfin_icache_flush_range(unsigned long start_address, unsigned long end_address);
extern void blackfin_dcache_flush_range(unsigned long start_address, unsigned long end_address);
extern void blackfin_dcache_flush_range(unsigned long start_address, unsigned long end_address);
+12 −12
Original line number Original line Diff line number Diff line
@@ -17,12 +17,12 @@ asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void arch_read_lock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int arch_read_trylock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void arch_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void arch_write_lock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int arch_write_trylock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void arch_write_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);


static inline int arch_spin_is_locked(arch_spinlock_t *lock)
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
{
@@ -64,32 +64,32 @@ static inline int arch_write_can_lock(arch_rwlock_t *rw)


static inline void arch_read_lock(arch_rwlock_t *rw)
static inline void arch_read_lock(arch_rwlock_t *rw)
{
{
	arch_read_lock_asm(&rw->lock);
	__raw_read_lock_asm(&rw->lock);
}
}


static inline int arch_read_trylock(arch_rwlock_t *rw)
static inline int arch_read_trylock(arch_rwlock_t *rw)
{
{
	return arch_read_trylock_asm(&rw->lock);
	return __raw_read_trylock_asm(&rw->lock);
}
}


static inline void arch_read_unlock(arch_rwlock_t *rw)
static inline void arch_read_unlock(arch_rwlock_t *rw)
{
{
	arch_read_unlock_asm(&rw->lock);
	__raw_read_unlock_asm(&rw->lock);
}
}


static inline void arch_write_lock(arch_rwlock_t *rw)
static inline void arch_write_lock(arch_rwlock_t *rw)
{
{
	arch_write_lock_asm(&rw->lock);
	__raw_write_lock_asm(&rw->lock);
}
}


static inline int arch_write_trylock(arch_rwlock_t *rw)
static inline int arch_write_trylock(arch_rwlock_t *rw)
{
{
	return arch_write_trylock_asm(&rw->lock);
	return __raw_write_trylock_asm(&rw->lock);
}
}


static inline void arch_write_unlock(arch_rwlock_t *rw)
static inline void arch_write_unlock(arch_rwlock_t *rw)
{
{
	arch_write_unlock_asm(&rw->lock);
	__raw_write_unlock_asm(&rw->lock);
}
}


#define arch_spin_relax(lock)  	cpu_relax()
#define arch_spin_relax(lock)  	cpu_relax()