Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 00b3aa3f authored by Paul Mundt
Browse files

sh: xchg()/__xchg() always_inline fixes for gcc4.



Make __xchg() a macro, so that gcc 4.0 doesn't blow up thanks to
always_inline..

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent bc8bff63
Loading
Loading
Loading
Loading
+32 −25
Original line number Diff line number Diff line
@@ -79,10 +79,8 @@ static inline void sched_cacheflush(void)
}
#endif

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

static __inline__ unsigned long tas(volatile int *m)
{ /* #define tas(ptr) (xchg((ptr),1)) */
{
	unsigned long retval;

	__asm__ __volatile__ ("tas.b	@%1\n\t"
@@ -91,8 +89,6 @@ static __inline__ unsigned long tas(volatile int *m)
	return retval;
}

extern void __xchg_called_with_bad_pointer(void);

/*
 * A brief note on ctrl_barrier(), the control register write barrier.
 *
@@ -272,7 +268,7 @@ do { \
/* For spinlocks etc */
#define local_irq_save(x)	x = local_irq_save()

static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long flags, retval;

@@ -283,7 +279,7 @@ static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
	return retval;
}

static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long flags, retval;

@@ -294,19 +290,30 @@ static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned lon
	return retval;
}

/*
 * Exchange @x with the value stored at @ptr, dispatching on the
 * operand @size (in bytes), and return the previous value.
 *
 * Only 4- and 1-byte operands are supported; any other size falls
 * through to __xchg_called_with_bad_pointer(), which is declared but
 * never defined, so a bad size is diagnosed when the kernel is linked.
 *
 * Fix vs. original: the `break;` statements that followed each
 * `return` were unreachable dead code and have been removed.
 */
static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	case 1:
		return xchg_u8(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
/*
 * Deliberately declared but never defined: referencing it from the
 * default branch of __xchg() below turns an unsupported operand size
 * into a link-time error instead of silently wrong code.
 * NOTE(review): no definition is visible in this chunk — confirm none
 * exists elsewhere in the tree.
 */
extern void __xchg_called_with_bad_pointer(void);

/*
 * __xchg(ptr, x, size): exchange x with *ptr and yield the old value,
 * dispatching on the operand size in bytes (4 -> xchg_u32, 1 ->
 * xchg_u8).  Implemented as a statement-expression macro rather than
 * an inline function so that gcc 4.0 does not choke on always_inline
 * (see commit message).  ptr is evaluated exactly once via the
 * __xchg_ptr temporary.
 */
#define __xchg(ptr, x, size)				\
({							\
	unsigned long __xchg__res;			\
	volatile void *__xchg_ptr = (ptr);		\
	switch (size) {					\
	case 4:						\
		__xchg__res = xchg_u32(__xchg_ptr, x);	\
		break;					\
	case 1:						\
		__xchg__res = xchg_u8(__xchg_ptr, x);	\
		break;					\
	default:					\
		__xchg_called_with_bad_pointer();	\
		__xchg__res = x;			\
		break;					\
	}						\
							\
	__xchg__res;					\
})

/*
 * Public front end: derives the size from *ptr and casts the result
 * back to *ptr's type.  Note that ptr is evaluated more than once
 * (sizeof is unevaluated, but __xchg re-expands it), so don't pass
 * expressions with side effects.
 */
#define xchg(ptr,x)	\
	((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))

static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
	unsigned long new)