
Commit 62479586 authored by Catalin Marinas

arm64: klib: Optimised atomic bitops



This patch implements the AArch64-specific atomic bitops functions using
exclusive memory accesses to avoid locking.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 2b8cac81
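Background for the commit message: LDXR/STXR are the AArch64 exclusive (load-linked/store-conditional) pair. The store-exclusive succeeds only if no other observer has written the location since the load-exclusive, so retrying until it succeeds yields an atomic read-modify-write without taking a lock. The C sketch below is illustrative only and not part of the commit (the function name is invented); it spells out the same retry loop the assembly uses, assuming a GCC-style compiler targeting AArch64:

#include <stdint.h>

/* Sketch of set_bit() as an exclusive-access retry loop. */
static inline void sketch_set_bit(int nr, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (nr & 63);		/* bit within the word */
	volatile unsigned long *word = p + (nr >> 6);	/* word holding bit nr */
	unsigned long tmp;
	uint32_t fail;

	asm volatile(
	"1:	ldxr	%0, %2\n"	/* load-exclusive the word          */
	"	orr	%0, %0, %3\n"	/* set the requested bit            */
	"	stxr	%w1, %0, %2\n"	/* store-exclusive; %w1 = 1 on fail */
	"	cbnz	%w1, 1b\n"	/* another observer wrote: retry    */
	: "=&r" (tmp), "=&r" (fail), "+Q" (*word)
	: "r" (mask));
}

On failure, stxr writes a non-zero status into its first operand, which drives the cbnz retry; this is exactly the shape of the bitop macro in the assembly diff below.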
+16 −2
@@ -32,6 +32,16 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+/*
+ * Little endian assembly atomic bitops.
+ */
+extern void set_bit(int nr, volatile unsigned long *p);
+extern void clear_bit(int nr, volatile unsigned long *p);
+extern void change_bit(int nr, volatile unsigned long *p);
+extern int test_and_set_bit(int nr, volatile unsigned long *p);
+extern int test_and_clear_bit(int nr, volatile unsigned long *p);
+extern int test_and_change_bit(int nr, volatile unsigned long *p);
+
 #include <asm-generic/bitops/builtin-__ffs.h>
 #include <asm-generic/bitops/builtin-ffs.h>
 #include <asm-generic/bitops/builtin-__fls.h>
@@ -45,9 +55,13 @@
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/lock.h>
 
-#include <asm-generic/bitops/atomic.h>
 #include <asm-generic/bitops/non-atomic.h>
 #include <asm-generic/bitops/le.h>
-#include <asm-generic/bitops/ext2-atomic.h>
+
+/*
+ * Ext2 is defined to use little-endian byte ordering.
+ */
+#define ext2_set_bit_atomic(lock, nr, p)	test_and_set_bit_le(nr, p)
+#define ext2_clear_bit_atomic(lock, nr, p)	test_and_clear_bit_le(nr, p)
 
 #endif /* __ASM_BITOPS_H */
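The header change swaps the generic implementations pulled in via asm-generic/bitops/atomic.h for extern declarations satisfied by the new assembly file, and defines the ext2 atomic helpers directly on the little-endian test_and_*_le() variants. The calling contract is unchanged: nr may exceed 63, in which case the word at p + (nr / 64) is targeted, and the test_and_* functions return the bit's previous value. A hypothetical caller (bitmap and bit number invented for illustration):

static unsigned long demo_bitmap[2];	/* 128 bits; invented for the example */

int demo(void)
{
	set_bit(70, demo_bitmap);	/* atomically sets bit 6 of word 1 */
	/* returns the previous value of the bit (here: nonzero) */
	return test_and_clear_bit(70, demo_bitmap);
}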
+8 −5
@@ -39,11 +39,6 @@ EXPORT_SYMBOL(__copy_from_user);
 EXPORT_SYMBOL(__copy_to_user);
 EXPORT_SYMBOL(__clear_user);
 
-	/* bitops */
-#ifdef CONFIG_SMP
-EXPORT_SYMBOL(__atomic_hash);
-#endif
-
 	/* physical memory */
 EXPORT_SYMBOL(memstart_addr);
 
@@ -54,3 +49,11 @@ EXPORT_SYMBOL(memset);
 EXPORT_SYMBOL(memcpy);
 EXPORT_SYMBOL(memmove);
 EXPORT_SYMBOL(memchr);
+
+	/* atomic bitops */
+EXPORT_SYMBOL(set_bit);
+EXPORT_SYMBOL(test_and_set_bit);
+EXPORT_SYMBOL(clear_bit);
+EXPORT_SYMBOL(test_and_clear_bit);
+EXPORT_SYMBOL(change_bit);
+EXPORT_SYMBOL(test_and_change_bit);
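The export changes follow from the new implementation: __atomic_hash, the spinlock hash that backed the generic fallback on SMP, no longer exists, while the six bitops are now out-of-line symbols that loadable modules must link against, hence the EXPORT_SYMBOL() entries. A minimal, hypothetical module showing the dependency:

#include <linux/module.h>
#include <linux/bitops.h>

static unsigned long demo_map;	/* hypothetical bitmap */

static int __init demo_init(void)
{
	/* resolves against the exported arm64 test_and_set_bit */
	if (!test_and_set_bit(0, &demo_map))
		pr_info("demo: claimed bit 0\n");
	return 0;
}

static void __exit demo_exit(void)
{
	clear_bit(0, &demo_map);	/* exported as well */
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");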
+70 −0
@@ -1,5 +1,7 @@
 /*
- * Copyright (C) 2012 ARM Limited
+ * Based on arch/arm/lib/bitops.h
  *
+ * Copyright (C) 2013 ARM Ltd.
+ *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License version 2 as
@@ -14,12 +16,55 @@
  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
  */
 
-#include <linux/kernel.h>
-#include <linux/spinlock.h>
-#include <linux/atomic.h>
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * x0: bits 5:0  bit offset
+ *     bits 63:6 word offset
+ * x1: address
+ */
+	.macro	bitop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x3, x2, x3		// Create mask
+1:	ldxr	x2, [x1]
+	\instr	x2, x2, x3
+	stxr	w0, x2, [x1]
+	cbnz	w0, 1b
+	ret
+ENDPROC(\name	)
+	.endm
+
+	.macro	testop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x4, x2, x3		// Create mask
+	smp_dmb	ish
+1:	ldxr	x2, [x1]
+	lsr	x0, x2, x3		// Save old value of bit
+	\instr	x2, x2, x4		// toggle bit
+	stxr	w2, x2, [x1]
+	cbnz	w2, 1b
+	smp_dmb	ish
+	and	x0, x0, #1
+3:	ret
+ENDPROC(\name	)
+	.endm
+
+/*
+ * Atomic bit operations.
+ */
+	bitop	change_bit, eor
+	bitop	clear_bit, bic
+	bitop	set_bit, orr
 
-#ifdef CONFIG_SMP
-arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
-	[0 ... (ATOMIC_HASH_SIZE-1)]  = __ARCH_SPIN_LOCK_UNLOCKED
-};
-#endif
+	testop	test_and_change_bit, eor
+	testop	test_and_clear_bit, bic
+	testop	test_and_set_bit, orr
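To unpack the macros' address arithmetic: nr arrives in x0, and its low six bits select a bit within a 64-bit word while the remaining bits select the word. "and x3, x0, #63" extracts the bit index, "eor x0, x0, x3" clears those low bits, and "add x1, x1, x0, lsr #3" converts the remaining bit count into a byte offset. Restated in C (function and parameter names are illustrative):

/* C restatement of the address/mask computation in the bitop macros. */
static inline volatile unsigned long *
bit_word(int nr, volatile unsigned long *addr, unsigned long *mask)
{
	unsigned long bit = nr & 63;			/* and  x3, x0, #63 */
	unsigned long off = (unsigned long)nr ^ bit;	/* eor  x0, x0, x3  */

	*mask = 1UL << bit;				/* lsl  x3, x2, x3  */
	/* add x1, x1, x0, lsr #3: off is a multiple of 64, so
	 * off >> 3 is the byte offset of the containing word */
	return (volatile unsigned long *)((volatile char *)addr + (off >> 3));
}

Note also the barrier placement: testop brackets its loop with smp_dmb ish so the returned old value has the full-barrier ordering the kernel requires of test_and_*(), while the plain bitop macro (set_bit() and friends) is atomic but carries no ordering guarantees.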