Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 2823d4da authored by H. Peter Anvin, committed by H. Peter Anvin
Browse files

x86, bitops: remove use of "sbb" to return CF



Use SETC instead of SBB to return the value of CF from assembly. Using
SETcc enables uniformity with other flags-returning pieces of assembly
code.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Link: http://lkml.kernel.org/r/1465414726-197858-2-git-send-email-hpa@linux.intel.com


Reviewed-by: Andy Lutomirski <luto@kernel.org>
Reviewed-by: Borislav Petkov <bp@suse.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
parent f5967101
Loading
Loading
Loading
Loading
+12 −12
Original line number Diff line number Diff line
@@ -230,11 +230,11 @@ test_and_set_bit_lock(long nr, volatile unsigned long *addr)
 */
static __always_inline int __test_and_set_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/*
	 * BTS sets bit @nr and leaves the bit's previous value in CF;
	 * SETC then materializes CF into a byte-wide result, so a
	 * byte-capable register or memory operand ("=qm") suffices.
	 */
	asm("bts %2,%1\n\t"
	    "setc %0"
	    : "=qm" (oldbit), ADDR
	    : "Ir" (nr));
	return oldbit;
}
@@ -270,11 +270,11 @@ static __always_inline int test_and_clear_bit(long nr, volatile unsigned long *a
 */
static __always_inline int __test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/* BTR clears bit @nr; its old value lands in CF, captured by SETC. */
	asm volatile("btr %2,%1\n\t"
		     "setc %0"
		     : "=qm" (oldbit), ADDR
		     : "Ir" (nr));
	return oldbit;
}
@@ -282,11 +282,11 @@ static __always_inline int __test_and_clear_bit(long nr, volatile unsigned long
/* WARNING: non atomic and it can be reordered! */
static __always_inline int __test_and_change_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/* BTC complements bit @nr; its old value lands in CF, captured by SETC. */
	asm volatile("btc %2,%1\n\t"
		     "setc %0"
		     : "=qm" (oldbit), ADDR
		     : "Ir" (nr) : "memory");

	return oldbit;
}
@@ -313,11 +313,11 @@ static __always_inline int constant_test_bit(long nr, const volatile unsigned lo

static __always_inline int variable_test_bit(long nr, volatile const unsigned long *addr)
{
	unsigned char oldbit;

	/* BT copies bit @nr into CF without modifying memory; SETC reads it back. */
	asm volatile("bt %2,%1\n\t"
		     "setc %0"
		     : "=qm" (oldbit)
		     : "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}
+6 −6
Original line number Diff line number Diff line
@@ -510,9 +510,9 @@ do { \
/* This is not atomic against other CPUs -- CPU preemption needs to be off */
#define x86_test_and_clear_bit_percpu(bit, var)				\
({									\
	unsigned char old__;						\
	/* BTR puts the old bit in CF; SETC turns CF into a byte. */	\
	asm volatile("btr %2,"__percpu_arg(1)"\n\tsetc %0"		\
		     : "=qm" (old__), "+m" (var)			\
		     : "dIr" (bit));					\
	old__;								\
})
@@ -532,11 +532,11 @@ static __always_inline int x86_this_cpu_constant_test_bit(unsigned int nr,
static inline int x86_this_cpu_variable_test_bit(int nr,
                        const unsigned long __percpu *addr)
{
	unsigned char oldbit;

	/* BT copies the tested bit into CF; SETC extracts it as a byte. */
	asm volatile("bt "__percpu_arg(2)",%1\n\t"
			"setc %0"
			: "=qm" (oldbit)
			: "m" (*(unsigned long *)addr), "Ir" (nr));

	return oldbit;
}
+3 −3
Original line number Diff line number Diff line
@@ -81,9 +81,9 @@ static inline int __const_sigismember(sigset_t *set, int _sig)

static inline int __gen_sigismember(sigset_t *set, int _sig)
{
	unsigned char ret;
	/*
	 * Test bit (_sig - 1) of the signal set: BTL puts the bit in CF,
	 * SETC returns it as a 0/1 byte.
	 */
	asm("btl %2,%1\n\tsetc %0"
	    : "=qm"(ret) : "m"(*set), "Ir"(_sig-1) : "cc");
	return ret;
}

+9 −9
Original line number Diff line number Diff line
@@ -79,10 +79,10 @@ static inline void sync_change_bit(long nr, volatile unsigned long *addr)
 */
static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/* LOCK BTS atomically sets the bit; SETC returns its old value from CF. */
	asm volatile("lock; bts %2,%1\n\tsetc %0"
		     : "=qm" (oldbit), "+m" (ADDR)
		     : "Ir" (nr) : "memory");
	return oldbit;
}
@@ -97,10 +97,10 @@ static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
 */
static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/* LOCK BTR atomically clears the bit; SETC returns its old value from CF. */
	asm volatile("lock; btr %2,%1\n\tsetc %0"
		     : "=qm" (oldbit), "+m" (ADDR)
		     : "Ir" (nr) : "memory");
	return oldbit;
}
@@ -115,10 +115,10 @@ static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
 */
static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
{
	unsigned char oldbit;

	/* LOCK BTC atomically flips the bit; SETC returns its old value from CF. */
	asm volatile("lock; btc %2,%1\n\tsetc %0"
		     : "=qm" (oldbit), "+m" (ADDR)
		     : "Ir" (nr) : "memory");
	return oldbit;
}
+1 −4
Original line number Diff line number Diff line
@@ -440,10 +440,7 @@ static inline unsigned long get_vflags(struct kernel_vm86_regs *regs)

static inline int is_revectored(int nr, struct revectored_struct *bitmap)
{
	/*
	 * The commit replaces the open-coded btl/sbbl sequence with the
	 * generic test_bit() helper on the bitmap's __map array.
	 */
	return test_bit(nr, bitmap->__map);
}

#define val_byte(val, n) (((__u8 *)&val)[n])