
Commit 8695c37d authored by David S. Miller

sparc: Convert some assembler over to linkage.h's ENTRY/ENDPROC



Use those, instead of doing it all by hand.

Signed-off-by: David S. Miller <davem@davemloft.net>
parent b55e81b9
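
For context, ENTRY and ENDPROC come from include/linux/linkage.h, and the conversion below is mechanical because they expand to essentially the boilerplate being deleted. A simplified sketch of the definitions (the real header builds them from per-architecture ASM_NL and ALIGN macros, so details vary):

/* Simplified sketch of the ENTRY/ENDPROC macros, modeled on
 * include/linux/linkage.h; ALIGN here is a stand-in for the
 * per-architecture alignment directive the real header uses.
 */
#define ALIGN .align 4

#ifndef ENTRY
#define ENTRY(name) \
	.globl name; \
	ALIGN; \
	name:
#endif

#ifndef END
#define END(name) \
	.size name, .-name
#endif

#ifndef ENDPROC
#define ENDPROC(name) \
	.type name, @function; \
	END(name)
#endif

Each ENTRY(foo) thus stands in for the hand-written .globl/label lines at the top of a routine, and ENDPROC(foo) for the trailing .size directive, with the function-type annotation moving from the top of the routine into ENDPROC.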
+4 −3
@@ -5,10 +5,10 @@
  * Copyright (C) 1999 David S. Miller (davem@redhat.com)
  */
 
+#include <linux/linkage.h>
+
 	.text
 	.align	4
-	.globl	__ashldi3
-__ashldi3:
+ENTRY(__ashldi3)
 	cmp	%o2, 0
 	be	9f
 	 mov	0x20, %g2
@@ -32,3 +32,4 @@ __ashldi3:
 9:
 	retl
 	 nop
+ENDPROC(__ashldi3)
+4 −3
@@ -5,10 +5,10 @@
  * Copyright (C) 1995 David S. Miller (davem@caip.rutgers.edu)
  */
 
+#include <linux/linkage.h>
+
 	.text
 	.align	4
-	.globl __ashrdi3
-__ashrdi3:
+ENTRY(__ashrdi3)
 	tst	%o2
 	be	3f
 	 or	%g0, 32, %g2
@@ -34,3 +34,4 @@ __ashrdi3:
 3:
 	jmpl	%o7 + 8, %g0
 	 nop
+ENDPROC(__ashrdi3)
+17 −32
@@ -3,6 +3,7 @@
  * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
+#include <linux/linkage.h>
 #include <asm/asi.h>
 #include <asm/backoff.h>
 
@@ -13,9 +14,7 @@
 	 * memory barriers, and a second which returns
 	 * a value and does the barriers.
 	 */
-	.globl	atomic_add
-	.type	atomic_add,#function
-atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
@@ -26,11 +25,9 @@ atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic_add, .-atomic_add
+ENDPROC(atomic_add)
 
-	.globl	atomic_sub
-	.type	atomic_sub,#function
-atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
@@ -41,11 +38,9 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic_sub, .-atomic_sub
+ENDPROC(atomic_sub)
 
-	.globl	atomic_add_ret
-	.type	atomic_add_ret,#function
-atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
@@ -56,11 +51,9 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	retl
 	 sra	%g1, 0, %o0
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic_add_ret, .-atomic_add_ret
+ENDPROC(atomic_add_ret)
 
-	.globl	atomic_sub_ret
-	.type	atomic_sub_ret,#function
-atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
@@ -71,11 +64,9 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	retl
 	 sra	%g1, 0, %o0
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic_sub_ret, .-atomic_sub_ret
+ENDPROC(atomic_sub_ret)
 
-	.globl	atomic64_add
-	.type	atomic64_add,#function
-atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
@@ -86,11 +77,9 @@ atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic64_add, .-atomic64_add
+ENDPROC(atomic64_add)
 
-	.globl	atomic64_sub
-	.type	atomic64_sub,#function
-atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
@@ -101,11 +90,9 @@ atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic64_sub, .-atomic64_sub
+ENDPROC(atomic64_sub)
 
-	.globl	atomic64_add_ret
-	.type	atomic64_add_ret,#function
-atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
@@ -116,11 +103,9 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	retl
 	 add	%g1, %o0, %o0
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic64_add_ret, .-atomic64_add_ret
+ENDPROC(atomic64_add_ret)
 
-	.globl	atomic64_sub_ret
-	.type	atomic64_sub_ret,#function
-atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
@@ -131,4 +116,4 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	retl
 	 sub	%g1, %o0, %o0
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
-	.size	atomic64_sub_ret, .-atomic64_sub_ret
+ENDPROC(atomic64_sub_ret)
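
As the surviving comment above notes, each of these routines comes in a barrier-free flavor and a value-returning flavor that performs the barriers; all of them share the same load/compute/compare-and-swap retry loop, with BACKOFF_SETUP and BACKOFF_SPIN handling contention. The conversion does not touch that logic. A rough C11 sketch of the pattern atomic_add implements (sketch_atomic_add is an illustrative name, not kernel API):

#include <stdatomic.h>

/* Illustrative C11 rendering of the lduw/add/cas retry loop above;
 * relaxed ordering mirrors the barrier-free flavor, and the real
 * code backs off exponentially instead of spinning tightly.
 */
static void sketch_atomic_add(int inc, _Atomic int *ptr)
{
	int old = atomic_load_explicit(ptr, memory_order_relaxed);

	/* cas publishes old + inc only if *ptr still equals old;
	 * on failure, old is reloaded and the loop retries.
	 */
	while (!atomic_compare_exchange_weak_explicit(
			ptr, &old, old + inc,
			memory_order_relaxed, memory_order_relaxed))
		;	/* BACKOFF_SPIN would delay here */
}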
+13 −24
@@ -3,14 +3,13 @@
  * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
  */
 
+#include <linux/linkage.h>
 #include <asm/asi.h>
 #include <asm/backoff.h>
 
 	.text
 
-	.globl	test_and_set_bit
-	.type	test_and_set_bit,#function
-test_and_set_bit:	/* %o0=nr, %o1=addr */
+ENTRY(test_and_set_bit)	/* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -29,11 +28,9 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	test_and_set_bit, .-test_and_set_bit
+ENDPROC(test_and_set_bit)
 
-	.globl	test_and_clear_bit
-	.type	test_and_clear_bit,#function
-test_and_clear_bit:	/* %o0=nr, %o1=addr */
+ENTRY(test_and_clear_bit) /* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -52,11 +49,9 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	test_and_clear_bit, .-test_and_clear_bit
+ENDPROC(test_and_clear_bit)
 
-	.globl	test_and_change_bit
-	.type	test_and_change_bit,#function
-test_and_change_bit:	/* %o0=nr, %o1=addr */
+ENTRY(test_and_change_bit) /* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -75,11 +70,9 @@ test_and_change_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	test_and_change_bit, .-test_and_change_bit
+ENDPROC(test_and_change_bit)
 
-	.globl	set_bit
-	.type	set_bit,#function
-set_bit:		/* %o0=nr, %o1=addr */
+ENTRY(set_bit) /* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -96,11 +89,9 @@ set_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	set_bit, .-set_bit
+ENDPROC(set_bit)
 
-	.globl	clear_bit
-	.type	clear_bit,#function
-clear_bit:		/* %o0=nr, %o1=addr */
+ENTRY(clear_bit) /* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -117,11 +108,9 @@ clear_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	clear_bit, .-clear_bit
+ENDPROC(clear_bit)
 
-	.globl	change_bit
-	.type	change_bit,#function
-change_bit:		/* %o0=nr, %o1=addr */
+ENTRY(change_bit) /* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -138,4 +127,4 @@ change_bit: /* %o0=nr, %o1=addr */
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
-	.size	change_bit, .-change_bit
+ENDPROC(change_bit)
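
As with the atomics, each bitop above is a cas retry loop with backoff; the shared srlx %o0, 6 / mov 1 prologue locates which 64-bit word holds bit nr and builds the mask for it. A hypothetical C equivalent of that indexing (illustrative, not the kernel's implementation, which also adds barriers in the test_and_* variants):

/* Hypothetical sketch of the word/mask computation the bitops
 * above share: bit nr lives in 64-bit word nr >> 6, under mask
 * 1UL << (nr & 63).  The atomic or stands in for the cas loop.
 */
static int sketch_test_and_set_bit(unsigned long nr, unsigned long *addr)
{
	unsigned long *word = addr + (nr >> 6);
	unsigned long mask = 1UL << (nr & 63);
	unsigned long old = __atomic_fetch_or(word, mask, __ATOMIC_SEQ_CST);

	return (old & mask) != 0;	/* previous state of the bit */
}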
+5 −5
@@ -4,6 +4,7 @@
  * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
  */
 
+#include <linux/linkage.h>
 #include <asm/page.h>
 
 	/* Zero out 64 bytes of memory at (buf + offset).
@@ -44,10 +45,7 @@
 	 */
 
 	.text
 	.align	4
-	.globl	bzero_1page, __copy_1page
-
-bzero_1page:
+ENTRY(bzero_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
 	/* %o0 = buf */
@@ -65,8 +63,9 @@ bzero_1page:
 
 	retl
 	 nop
+ENDPROC(bzero_1page)
 
-__copy_1page:
+ENTRY(__copy_1page)
/* NOTE: If you change the number of insns of this routine, please check
 * arch/sparc/mm/hypersparc.S */
 	/* %o0 = dst, %o1 = src */
@@ -87,3 +86,4 @@ __copy_1page:
 
 	retl
 	 nop
+ENDPROC(__copy_1page)
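
Both routines above work through a page in 64-byte blocks; the macro whose comment survives in the first hunk zeroes one such block with unrolled double-word stores. A hypothetical C outline of bzero_1page's structure (PAGE_SIZE_SKETCH stands in for the sparc32 PAGE_SIZE that <asm/page.h> provides):

#include <stddef.h>
#include <string.h>

#define PAGE_SIZE_SKETCH 4096	/* assumed sparc32 page size */

/* Illustrative outline only: clear one page, one 64-byte block
 * per iteration, matching the assembly's blocking scheme.
 */
static void sketch_bzero_1page(void *buf)
{
	for (size_t off = 0; off < PAGE_SIZE_SKETCH; off += 64)
		memset((char *)buf + off, 0, 64);	/* one block */
}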