
Commit 8005711c authored by Manuel Lauss, committed by Ralf Baechle

MIPS: Extend DMA_MAYBE_COHERENT logic to DMA_NONCOHERENT use



Setting DMA_MAYBE_COHERENT gives a platform the opportunity to select
use of cache ops at boot.

Signed-off-by: Manuel Lauss <manuel.lauss@gmail.com>
Cc: Linux-MIPS <linux-mips@linux-mips.org>
Patchwork: https://patchwork.linux-mips.org/patch/6575/


Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent bfc3c5a6
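
As the commit message says, DMA_MAYBE_COHERENT defers the cache-ops decision to boot time: the _dma_cache_* function pointers and the r4k implementations stay compiled in, and r4k_cache_init() points them at cache_noop when the boot-time coherentio flag reports a DMA-coherent system. The standalone C sketch below illustrates that boot-time dispatch pattern; the names mirror the kernel's coherentio, cache_noop and _dma_cache_wback_inv, but the code is an illustration, not kernel code.

#include <stdio.h>

/* Function pointer for the writeback+invalidate DMA cache op,
 * analogous to the kernel's _dma_cache_wback_inv. */
static void (*dma_cache_wback_inv)(unsigned long start, unsigned long size);

/* Used when the platform turns out to be DMA-coherent at boot. */
static void cache_noop(unsigned long start, unsigned long size)
{
}

/* Stand-in for the real r4k writeback+invalidate implementation. */
static void r4k_dma_cache_wback_inv(unsigned long start, unsigned long size)
{
	printf("wback+inv %lu bytes at %#lx\n", size, start);
}

/* Decided at boot (hardware probe or command line), not at config time. */
static int coherentio;

static void cache_init(void)
{
	/* A coherent system can skip DMA cache maintenance entirely. */
	dma_cache_wback_inv = coherentio ? cache_noop : r4k_dma_cache_wback_inv;
}

int main(void)
{
	coherentio = 0;		/* pretend the bus is not DMA-coherent */
	cache_init();
	dma_cache_wback_inv(0x80001000UL, 256);
	return 0;
}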
+2 −2
@@ -584,7 +584,7 @@ static inline void memcpy_toio(volatile void __iomem *dst, const void *src, int
  *
  * This API used to be exported; it now is for arch code internal use only.
  */
-#ifdef CONFIG_DMA_NONCOHERENT
+#if defined(CONFIG_DMA_NONCOHERENT) || defined(CONFIG_DMA_MAYBE_COHERENT)
 
 extern void (*_dma_cache_wback_inv)(unsigned long start, unsigned long size);
 extern void (*_dma_cache_wback)(unsigned long start, unsigned long size);
@@ -603,7 +603,7 @@ extern void (*_dma_cache_inv)(unsigned long start, unsigned long size);
 #define dma_cache_inv(start,size)	\
 	do { (void) (start); (void) (size); } while (0)
 
-#endif /* CONFIG_DMA_NONCOHERENT */
+#endif /* CONFIG_DMA_NONCOHERENT || CONFIG_DMA_MAYBE_COHERENT */
 
 /*
  * Read a 32-bit register that requires a 64-bit read cycle on the bus.
+3 −3
@@ -673,7 +673,7 @@ static void r4k_flush_icache_range(unsigned long start, unsigned long end)
 	instruction_hazard();
 }
 
-#ifdef CONFIG_DMA_NONCOHERENT
+#if defined(CONFIG_DMA_NONCOHERENT) || defined(CONFIG_DMA_MAYBE_COHERENT)
 
 static void r4k_dma_cache_wback_inv(unsigned long addr, unsigned long size)
 {
@@ -744,7 +744,7 @@ static void r4k_dma_cache_inv(unsigned long addr, unsigned long size)
 	bc_inv(addr, size);
 	__sync();
 }
-#endif /* CONFIG_DMA_NONCOHERENT */
+#endif /* CONFIG_DMA_NONCOHERENT || CONFIG_DMA_MAYBE_COHERENT */
 
 /*
  * While we're protected against bad userland addresses we don't care
@@ -1559,7 +1559,7 @@ void r4k_cache_init(void)
 	flush_icache_range	= r4k_flush_icache_range;
 	local_flush_icache_range	= local_r4k_flush_icache_range;
 
-#if defined(CONFIG_DMA_NONCOHERENT)
+#if defined(CONFIG_DMA_NONCOHERENT) || defined(CONFIG_DMA_MAYBE_COHERENT)
 	if (coherentio) {
 		_dma_cache_wback_inv	= (void *)cache_noop;
 		_dma_cache_wback	= (void *)cache_noop;
+2 −2
@@ -49,7 +49,7 @@ EXPORT_SYMBOL_GPL(local_flush_data_cache_page);
 EXPORT_SYMBOL(flush_data_cache_page);
 EXPORT_SYMBOL(flush_icache_all);
 
-#ifdef CONFIG_DMA_NONCOHERENT
+#if defined(CONFIG_DMA_NONCOHERENT) || defined(CONFIG_DMA_MAYBE_COHERENT)
 
 /* DMA cache operations. */
 void (*_dma_cache_wback_inv)(unsigned long start, unsigned long size);
@@ -58,7 +58,7 @@ void (*_dma_cache_inv)(unsigned long start, unsigned long size);
 
 EXPORT_SYMBOL(_dma_cache_wback_inv);
 
-#endif /* CONFIG_DMA_NONCOHERENT */
+#endif /* CONFIG_DMA_NONCOHERENT || CONFIG_DMA_MAYBE_COHERENT */
 
 /*
  * We could optimize the case where the cache argument is not BCACHE but