Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit edc9a958 authored by Paul Mundt
Browse files

sh: nommu: Support building without an uncached mapping.



Now that nommu selects 32BIT we run into the situation where SH-2A
supports an uncached identity mapping by way of the BSC, while the SH-2
does not. This provides stubs for the PC manglers and tidies up some of
the system*.h mess in the process.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent e2fcf74f
Loading
Loading
Loading
Loading
+1 −3
Original line number Original line Diff line number Diff line
@@ -10,6 +10,7 @@
#include <linux/compiler.h>
#include <linux/compiler.h>
#include <linux/linkage.h>
#include <linux/linkage.h>
#include <asm/types.h>
#include <asm/types.h>
#include <asm/uncached.h>


#define AT_VECTOR_SIZE_ARCH 5 /* entries in ARCH_DLINFO */
#define AT_VECTOR_SIZE_ARCH 5 /* entries in ARCH_DLINFO */


@@ -137,9 +138,6 @@ extern unsigned int instruction_size(unsigned int insn);
#define instruction_size(insn)	(4)
#define instruction_size(insn)	(4)
#endif
#endif


extern unsigned long cached_to_uncached;
extern unsigned long uncached_size;

void per_cpu_trap_init(void);
void per_cpu_trap_init(void);
void default_idle(void);
void default_idle(void);
void cpu_idle_wait(void);
void cpu_idle_wait(void);
+0 −36
Original line number Original line Diff line number Diff line
@@ -145,42 +145,6 @@ do { \
		__restore_dsp(prev);				\
		__restore_dsp(prev);				\
} while (0)
} while (0)


/*
 * Jump to uncached area.
 * When handling TLB or caches, we need to do it from an uncached area.
 *
 * "mova 1f, %0" loads the PC-relative address of local label 1 (the
 * instruction stream immediately after this sequence) into the dummy
 * register; the "=&z" constraint pins it to r0, the only destination
 * mova accepts on SH.  Adding cached_to_uncached rebases that address
 * into the uncached alias, and the jmp (with its mandatory delay-slot
 * nop) resumes execution there.  ".balign 4" keeps label 1 at the
 * 4-byte alignment mova's PC-relative addressing requires.
 */
#define jump_to_uncached()			\
do {						\
	unsigned long __dummy;			\
						\
	__asm__ __volatile__(			\
		"mova	1f, %0\n\t"		\
		"add	%1, %0\n\t"		\
		"jmp	@%0\n\t"		\
		" nop\n\t"			\
		".balign 4\n"			\
		"1:"				\
		: "=&z" (__dummy)		\
		: "r" (cached_to_uncached));	\
} while (0)

/*
 * Back to cached area.
 *
 * ctrl_barrier() serializes outstanding control-register/cache
 * operations before leaving the uncached mapping (see its definition
 * for exact semantics).  "mov.l 1f, %0" then loads the literal stored
 * at label 1 — the link-time (cached) address of label 2 — and the
 * jmp (plus delay-slot nop) transfers execution back to the cached
 * identity mapping.  ".balign 4" aligns the literal pool entry as
 * mov.l's PC-relative load requires.
 */
#define back_to_cached()				\
do {							\
	unsigned long __dummy;				\
	ctrl_barrier();					\
	__asm__ __volatile__(				\
		"mov.l	1f, %0\n\t"			\
		"jmp	@%0\n\t"			\
		" nop\n\t"				\
		".balign 4\n"				\
		"1:	.long 2f\n"			\
		"2:"					\
		: "=&r" (__dummy));			\
} while (0)

#ifdef CONFIG_CPU_HAS_SR_RB
#ifdef CONFIG_CPU_HAS_SR_RB
#define lookup_exception_vector()	\
#define lookup_exception_vector()	\
({					\
({					\
+0 −3
Original line number Original line Diff line number Diff line
@@ -34,9 +34,6 @@ do { \
			      &next->thread);			\
			      &next->thread);			\
} while (0)
} while (0)


#define jump_to_uncached()	do { } while (0)
#define back_to_cached()	do { } while (0)

#define __icbi(addr)	__asm__ __volatile__ ( "icbi %0, 0\n\t" : : "r" (addr))
#define __icbi(addr)	__asm__ __volatile__ ( "icbi %0, 0\n\t" : : "r" (addr))
#define __ocbp(addr)	__asm__ __volatile__ ( "ocbp %0, 0\n\t" : : "r" (addr))
#define __ocbp(addr)	__asm__ __volatile__ ( "ocbp %0, 0\n\t" : : "r" (addr))
#define __ocbi(addr)	__asm__ __volatile__ ( "ocbi %0, 0\n\t" : : "r" (addr))
#define __ocbi(addr)	__asm__ __volatile__ ( "ocbi %0, 0\n\t" : : "r" (addr))
+40 −0
Original line number Original line Diff line number Diff line
@@ -4,15 +4,55 @@
#include <linux/bug.h>
#include <linux/bug.h>


#ifdef CONFIG_UNCACHED_MAPPING
#ifdef CONFIG_UNCACHED_MAPPING
extern unsigned long cached_to_uncached;
extern unsigned long uncached_size;
extern unsigned long uncached_start, uncached_end;
extern unsigned long uncached_start, uncached_end;


extern int virt_addr_uncached(unsigned long kaddr);
extern int virt_addr_uncached(unsigned long kaddr);
extern void uncached_init(void);
extern void uncached_init(void);
extern void uncached_resize(unsigned long size);
extern void uncached_resize(unsigned long size);

/*
 * Jump to uncached area.
 * When handling TLB or caches, we need to do it from an uncached area.
 *
 * "mova 1f, %0" loads the PC-relative address of local label 1 (the
 * instruction stream immediately after this sequence) into the dummy
 * register; the "=&z" constraint pins it to r0, the only destination
 * mova accepts on SH.  Adding cached_to_uncached rebases that address
 * into the uncached alias, and the jmp (with its mandatory delay-slot
 * nop) resumes execution there.  ".balign 4" keeps label 1 at the
 * 4-byte alignment mova's PC-relative addressing requires.
 */
#define jump_to_uncached()			\
do {						\
	unsigned long __dummy;			\
						\
	__asm__ __volatile__(			\
		"mova	1f, %0\n\t"		\
		"add	%1, %0\n\t"		\
		"jmp	@%0\n\t"		\
		" nop\n\t"			\
		".balign 4\n"			\
		"1:"				\
		: "=&z" (__dummy)		\
		: "r" (cached_to_uncached));	\
} while (0)

/*
 * Back to cached area.
 *
 * ctrl_barrier() serializes outstanding control-register/cache
 * operations before leaving the uncached mapping (see its definition
 * for exact semantics).  "mov.l 1f, %0" then loads the literal stored
 * at label 1 — the link-time (cached) address of label 2 — and the
 * jmp (plus delay-slot nop) transfers execution back to the cached
 * identity mapping.  ".balign 4" aligns the literal pool entry as
 * mov.l's PC-relative load requires.
 */
#define back_to_cached()				\
do {							\
	unsigned long __dummy;				\
	ctrl_barrier();					\
	__asm__ __volatile__(				\
		"mov.l	1f, %0\n\t"			\
		"jmp	@%0\n\t"			\
		" nop\n\t"				\
		".balign 4\n"				\
		"1:	.long 2f\n"			\
		"2:"					\
		: "=&r" (__dummy));			\
} while (0)
#else
#else
#define virt_addr_uncached(kaddr)	(0)
#define virt_addr_uncached(kaddr)	(0)
#define uncached_init()			do { } while (0)
#define uncached_init()			do { } while (0)
#define uncached_resize(size)		BUG()
#define uncached_resize(size)		BUG()
#define jump_to_uncached()		do { } while (0)
#define back_to_cached()		do { } while (0)
#endif
#endif


#endif /* __ASM_SH_UNCACHED_H */
#endif /* __ASM_SH_UNCACHED_H */