Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 4b664e73 authored by Matthew Wilcox; committed by Linus Torvalds
Browse files

ia64: Rewrite atomic_add and atomic_sub



Force __builtin_constant_p to evaluate whether the argument to atomic_add
& atomic_sub is constant in the front-end before optimisations which
can lead GCC to output a call to __bad_increment_for_ia64_fetch_and_add().

See GCC bugzilla 83653.

Signed-off-by: Jakub Jelinek <jakub@redhat.com>
Signed-off-by: Matthew Wilcox <mawilcox@microsoft.com>
Signed-off-by: Tony Luck <tony.luck@intel.com>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 726ba84b
Loading
Loading
Loading
Loading
+19 −18
Original line number Diff line number Diff line
@@ -65,29 +65,30 @@ ia64_atomic_fetch_##op (int i, atomic_t *v) \
ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

#ifdef __OPTIMIZE__
#define __ia64_atomic_const(i)	__builtin_constant_p(i) ?		\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0

#define atomic_add_return(i, v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(__i, &(v)->counter) :	\
				ia64_atomic_add(__i, v);		\
})

#define atomic_sub_return(i, v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(-__i, &(v)->counter) :	\
				ia64_atomic_sub(__i, v);		\
})
#else
#define atomic_add_return(i, v)	ia64_atomic_add(i, v)
#define atomic_sub_return(i, v)	ia64_atomic_sub(i, v)
#endif

#define atomic_fetch_add(i,v)						\
({									\