Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit b08ee5f7 authored by Borislav Petkov, committed by H. Peter Anvin
Browse files

x86: Simplify __HAVE_ARCH_CMPXCHG tests



Both the 32-bit and 64-bit cmpxchg.h header define __HAVE_ARCH_CMPXCHG
and there's ifdeffery which checks it. But since both bitness define it,
we can just as well move it up to the main cmpxchg header and simplify a
bit of code in doing that.

Signed-off-by: Borislav Petkov <bp@suse.de>
Link: http://lkml.kernel.org/r/20140711104338.GB17083@pd.tnic


Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
parent 89171579
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -4,6 +4,8 @@
#include <linux/compiler.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
@@ -143,7 +145,6 @@ extern void __add_wrong_size(void)
# include <asm/cmpxchg_64.h>
#endif

#ifdef __HAVE_ARCH_CMPXCHG
#define cmpxchg(ptr, old, new)						\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))

@@ -152,7 +153,6 @@ extern void __add_wrong_size(void)

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local(ptr, old, new, sizeof(*(ptr)))
#endif

/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
+0 −2
Original line number Diff line number Diff line
@@ -34,8 +34,6 @@ static inline void set_64bit(volatile u64 *ptr, u64 value)
		     : "memory");
}

#define __HAVE_ARCH_CMPXCHG 1

#ifdef CONFIG_X86_CMPXCHG64
#define cmpxchg64(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
+0 −2
Original line number Diff line number Diff line
@@ -6,8 +6,6 @@ static inline void set_64bit(volatile u64 *ptr, u64 val)
	*ptr = val;
}

#define __HAVE_ARCH_CMPXCHG 1

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+1 −1
Original line number Diff line number Diff line
@@ -13,7 +13,7 @@
#define RTC_ALWAYS_BCD	1	/* RTC operates in binary mode */
#endif

#if defined(CONFIG_X86_32) && defined(__HAVE_ARCH_CMPXCHG)
#if defined(CONFIG_X86_32)
/*
 * This lock provides nmi access to the CMOS/RTC registers.  It has some
 * special properties.  It is owned by a CPU and stores the index register
+2 −14
Original line number Diff line number Diff line
@@ -100,23 +100,11 @@ do { \
static inline int __mutex_fastpath_trylock(atomic_t *count,
					   int (*fail_fn)(atomic_t *))
{
	/*
	 * We have two variants here. The cmpxchg based one is the best one
	 * because it never induce a false contention state.  It is included
	 * here because architectures using the inc/dec algorithms over the
	 * xchg ones are much more likely to support cmpxchg natively.
	 *
	 * If not we fall back to the spinlock based variant - that is
	 * just as efficient (and simpler) as a 'destructive' probing of
	 * the mutex state would be.
	 */
#ifdef __HAVE_ARCH_CMPXCHG
	/* cmpxchg because it never induces a false contention state. */
	if (likely(atomic_cmpxchg(count, 1, 0) == 1))
		return 1;

	return 0;
#else
	return fail_fn(count);
#endif
}

#endif /* _ASM_X86_MUTEX_32_H */
Loading