
Commit 24f287e4 authored by David S. Miller

[SPARC64]: Implement atomic backoff.



When the cpu count is high and contention hits an atomic object, the
processors can synchronize such that some cpus continually get knocked
out and cannot complete the atomic update.

So implement an exponential backoff when built for SMP.

Signed-off-by: David S. Miller <davem@davemloft.net>
parent d85714d8
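
The retry-with-backoff pattern that the diffs below implement in SPARC64 assembly looks roughly like this in C. This is only an illustrative sketch: the function name atomic_add_backoff and the GCC __sync_val_compare_and_swap builtin are stand-ins for the real cas-based assembly and are not part of the change.

#define BACKOFF_LIMIT	(4 * 1024)	/* same cap as the new backoff.h below */

static void atomic_add_backoff(int inc, int *p)
{
	unsigned long backoff = 1;		/* BACKOFF_SETUP */

	for (;;) {
		int old = *p;

		/* __sync_val_compare_and_swap stands in for the SPARC cas
		 * instruction: it returns the old memory value, storing the
		 * new one only if memory still held 'old'. */
		if (__sync_val_compare_and_swap(p, old, old + inc) == old)
			return;			/* cas succeeded */

		/* cas lost the race: spin 'backoff' iterations, double the
		 * delay while it is within BACKOFF_LIMIT, then retry. */
		for (unsigned long i = backoff; i != 0; i--)
			__asm__ __volatile__("" ::: "memory");
		if (backoff <= BACKOFF_LIMIT)
			backoff <<= 1;
	}
}

Without the backoff, every CPU that loses the cas race retries immediately, and on large machines those retries can stay in lock-step so that some CPUs never win; the growing delay breaks that synchronization.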
arch/sparc64/lib/atomic.S  +27 −11
/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text

@@ -16,27 +16,31 @@
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add, .-atomic_add

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub, .-atomic_sub

	/* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	bne,pn	%icc, 2f
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub_ret, .-atomic_sub_ret

	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 add	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub_ret, .-atomic64_sub_ret
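
The *_ret variants above additionally return the freshly computed value: after a successful cas/casx, %g7 holds the old memory contents, the add/sub in the branch delay slot recomputes the result, and sra %g7, 0, %o0 sign-extends the 32-bit value into the 64-bit return register (the 64-bit versions just use mov). A rough C equivalent of atomic_add_ret, shown without the backoff and the ATOMIC_PRE_BARRIER/ATOMIC_POST_BARRIER macros for brevity; the helper name and the __sync builtin are illustrative only:

static long atomic_add_ret_sketch(int inc, int *p)
{
	int old, res;

	do {
		old = *p;
		res = old + inc;
	} while (__sync_val_compare_and_swap(p, old, res) != old);

	/* sra %g7, 0, %o0: sign-extend the 32-bit result for the return */
	return (long)res;
}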
arch/sparc64/lib/bitops.S  +21 −9
/* $Id: bitops.S,v 1.3 2001/11/18 00:12:56 davem Exp $
 * bitops.S: Sparc64 atomic bit operations.
/* bitops.S: Sparc64 atomic bit operations.
 *
 * Copyright (C) 2000 David S. Miller (davem@redhat.com)
 * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text

@@ -29,6 +29,7 @@
	.globl	test_and_set_bit
	.type	test_and_set_bit,#function
test_and_set_bit:	/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	BITOP_PRE_BARRIER
	srlx	%o0, 6, %g1
	mov	1, %o2
@@ -40,18 +41,20 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
	or	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 and	%g7, %o2, %g2
	clr	%o0
	movrne	%g2, 1, %o0
	BITOP_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	test_and_set_bit, .-test_and_set_bit

	.globl	test_and_clear_bit
	.type	test_and_clear_bit,#function
test_and_clear_bit:	/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	BITOP_PRE_BARRIER
	srlx	%o0, 6, %g1
	mov	1, %o2
@@ -63,18 +66,20 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
	andn	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 and	%g7, %o2, %g2
	clr	%o0
	movrne	%g2, 1, %o0
	BITOP_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	test_and_clear_bit, .-test_and_clear_bit

	.globl	test_and_change_bit
	.type	test_and_change_bit,#function
test_and_change_bit:	/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	BITOP_PRE_BARRIER
	srlx	%o0, 6, %g1
	mov	1, %o2
@@ -86,18 +91,20 @@ test_and_change_bit: /* %o0=nr, %o1=addr */
	xor	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 and	%g7, %o2, %g2
	clr	%o0
	movrne	%g2, 1, %o0
	BITOP_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	test_and_change_bit, .-test_and_change_bit

	.globl	set_bit
	.type	set_bit,#function
set_bit:		/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	srlx	%o0, 6, %g1
	mov	1, %o2
	sllx	%g1, 3, %g3
@@ -108,15 +115,17 @@ set_bit: /* %o0=nr, %o1=addr */
	or	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	set_bit, .-set_bit

	.globl	clear_bit
	.type	clear_bit,#function
clear_bit:		/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	srlx	%o0, 6, %g1
	mov	1, %o2
	sllx	%g1, 3, %g3
@@ -127,15 +136,17 @@ clear_bit: /* %o0=nr, %o1=addr */
	andn	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	clear_bit, .-clear_bit

	.globl	change_bit
	.type	change_bit,#function
change_bit:		/* %o0=nr, %o1=addr */
	BACKOFF_SETUP(%o3)
	srlx	%o0, 6, %g1
	mov	1, %o2
	sllx	%g1, 3, %g3
@@ -146,8 +157,9 @@ change_bit: /* %o0=nr, %o1=addr */
	xor	%g7, %o2, %g1
	casx	[%o1], %g7, %g1
	cmp	%g7, %g1
	bne,pn	%xcc, 1b
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)
	.size	change_bit, .-change_bit
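
Each bit operation above first locates the 64-bit word containing bit nr (srlx %o0, 6 for the word index, sllx %g1, 3 for the byte offset) and builds a single-bit mask, then runs the same casx retry loop. A rough C equivalent of test_and_set_bit follows; it is an illustrative sketch only, and the helper name and the __sync builtin are not part of the change.

static int test_and_set_bit_sketch(unsigned long nr, unsigned long *addr)
{
	unsigned long *word = addr + (nr >> 6);		/* srlx %o0, 6 */
	unsigned long mask = 1UL << (nr & 63);		/* single-bit mask */
	unsigned long old;

	do {
		old = *word;
	} while (__sync_val_compare_and_swap(word, old, old | mask) != old);

	return (old & mask) != 0;	/* nonzero if the bit was already set */
}

set_bit, clear_bit and change_bit are the same loops minus the return value and the BITOP_PRE_BARRIER/BITOP_POST_BARRIER macros.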
include/asm-sparc64/backoff.h  +28 −0
#ifndef _SPARC64_BACKOFF_H
#define _SPARC64_BACKOFF_H

#define BACKOFF_LIMIT	(4 * 1024)

#ifdef CONFIG_SMP

#define BACKOFF_SETUP(reg)	\
	mov	1, reg

#define BACKOFF_SPIN(reg, tmp, label)	\
	mov	reg, tmp; \
88:	brnz,pt	tmp, 88b; \
	 sub	tmp, 1, tmp; \
	cmp	reg, BACKOFF_LIMIT; \
	bg,pn	%xcc, label; \
	 nop; \
	ba,pt	%xcc, label; \
	 sllx	reg, 1, reg;

#else

#define BACKOFF_SETUP(reg)
#define BACKOFF_SPIN(reg, tmp, label)

#endif

#endif /* _SPARC64_BACKOFF_H */
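
Taken together, BACKOFF_SETUP seeds a per-call delay counter with 1, and each time the cas/casx loses, BACKOFF_SPIN busy-waits that many iterations, doubles the counter while it is still at or below BACKOFF_LIMIT, and branches back to the 1: retry label; on non-SMP builds both macros expand to nothing, so uniprocessor kernels are unaffected. A line-for-line C rendering of BACKOFF_SPIN (the function name backoff_spin is illustrative only):

#define BACKOFF_LIMIT	(4 * 1024)	/* as defined in the header above */

static unsigned long backoff_spin(unsigned long reg)
{
	unsigned long tmp;

	for (tmp = reg; tmp != 0; tmp--)	/* 88: brnz,pt tmp, 88b; sub tmp, 1, tmp */
		__asm__ __volatile__("" ::: "memory");

	if (reg <= BACKOFF_LIMIT)		/* cmp reg, BACKOFF_LIMIT; bg,pn %xcc, label */
		reg <<= 1;			/* sllx reg, 1, reg */

	return reg;	/* caller branches back to the 1: retry label */
}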