Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 05490626 authored by Ralf Baechle
Browse files

MIPS: Move definitions for 32/64-bit agnostic inline assembler to new file.



Inspired by Markos Chandras' patch.  I just didn't want to pull bitops.h
into pgtable.h.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
References: https://patchwork.linux-mips.org/patch/11052/
parent 92e9953c
Loading
Loading
Loading
Loading
+1 −16
Original line number Diff line number Diff line
@@ -19,25 +19,10 @@
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	 "
#define __EXT		"dext	 "
#endif

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
+28 −0
Original line number Diff line number Diff line
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Macros for 32/64-bit neutral inline assembler
 *
 * MIPS32 and MIPS64 use different mnemonics for the native-word-size
 * load-linked / store-conditional and bit-field insert / extract
 * instructions.  These macros let callers emit inline asm that operates
 * on a full "long" without an #ifdef at every use site.
 */

#ifndef __ASM_LLSC_H
#define __ASM_LLSC_H

#if _MIPS_SZLONG == 32
/* long is 32 bits: 2^5 bits per long, bit index masked with 31 */
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
/* long is 64 bits: 2^6 bits per long, bit index masked with 63;
 * the doubleword forms (lld/scd/dins/dext) are used instead. */
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	"
#define __EXT		"dext	"
#endif

#endif /* __ASM_LLSC_H */
+2 −9
Original line number Diff line number Diff line
@@ -187,23 +187,16 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
		 * For SMP, multiple CPUs can race, so we need to do
		 * this atomically.
		 */
#ifdef CONFIG_64BIT
#define LL_INSN "lld"
#define SC_INSN "scd"
#else /* CONFIG_32BIT */
#define LL_INSN "ll"
#define SC_INSN "sc"
#endif
		unsigned long page_global = _PAGE_GLOBAL;
		unsigned long tmp;

		__asm__ __volatile__ (
			"	.set	push\n"
			"	.set	noreorder\n"
			"1:	" LL_INSN "	%[tmp], %[buddy]\n"
			"1:	" __LL "	%[tmp], %[buddy]\n"
			"	bnez	%[tmp], 2f\n"
			"	 or	%[tmp], %[tmp], %[global]\n"
			"	" SC_INSN "	%[tmp], %[buddy]\n"
			"	" __SC "	%[tmp], %[buddy]\n"
			"	beqz	%[tmp], 1b\n"
			"	 nop\n"
			"2:\n"