Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 9289ea7f authored by David S. Miller, committed by Masahiro Yamada
Browse files

sparc64: Use indirect calls in hamming weight stubs



Otherwise, depending upon link order, the branch relocation
limits could be exceeded.

Signed-off-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
parent 3c2993b8
Loading
Loading
Loading
Loading
+8 −8
Original line number Original line Diff line number Diff line
@@ -4,8 +4,8 @@
	.text
	.text
	.align	32
	.align	32
ENTRY(__arch_hweight8)
ENTRY(__arch_hweight8)
	ba,pt	%xcc, __sw_hweight8
	sethi	%hi(__sw_hweight8), %g1
	 nop
	jmpl	%g1 + %lo(__sw_hweight8), %g0
	 nop
	 nop
ENDPROC(__arch_hweight8)
ENDPROC(__arch_hweight8)
EXPORT_SYMBOL(__arch_hweight8)
EXPORT_SYMBOL(__arch_hweight8)
@@ -17,8 +17,8 @@ EXPORT_SYMBOL(__arch_hweight8)
	.previous
	.previous


ENTRY(__arch_hweight16)
ENTRY(__arch_hweight16)
	ba,pt	%xcc, __sw_hweight16
	sethi	%hi(__sw_hweight16), %g1
	 nop
	jmpl	%g1 + %lo(__sw_hweight16), %g0
	 nop
	 nop
ENDPROC(__arch_hweight16)
ENDPROC(__arch_hweight16)
EXPORT_SYMBOL(__arch_hweight16)
EXPORT_SYMBOL(__arch_hweight16)
@@ -30,8 +30,8 @@ EXPORT_SYMBOL(__arch_hweight16)
	.previous
	.previous


ENTRY(__arch_hweight32)
ENTRY(__arch_hweight32)
	ba,pt	%xcc, __sw_hweight32
	sethi	%hi(__sw_hweight32), %g1
	 nop
	jmpl	%g1 + %lo(__sw_hweight32), %g0
	 nop
	 nop
ENDPROC(__arch_hweight32)
ENDPROC(__arch_hweight32)
EXPORT_SYMBOL(__arch_hweight32)
EXPORT_SYMBOL(__arch_hweight32)
@@ -43,8 +43,8 @@ EXPORT_SYMBOL(__arch_hweight32)
	.previous
	.previous


ENTRY(__arch_hweight64)
ENTRY(__arch_hweight64)
	ba,pt	%xcc, __sw_hweight64
	sethi	%hi(__sw_hweight64), %g1
	 nop
	jmpl	%g1 + %lo(__sw_hweight64), %g0
	 nop
	 nop
ENDPROC(__arch_hweight64)
ENDPROC(__arch_hweight64)
EXPORT_SYMBOL(__arch_hweight64)
EXPORT_SYMBOL(__arch_hweight64)