Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 702b94bf authored by Russell King
Browse files

ARM: dma-mapping: remove dmac_clean_range and dmac_inv_range



These are now unused, and so can be removed.

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Tested-By: Santosh Shilimkar <santosh.shilimkar@ti.com>
parent a9c9147e
Loading
Loading
Loading
Loading
+0 −23
Original line number Diff line number Diff line
@@ -182,21 +182,6 @@
 *	DMA Cache Coherency
 *	===================
 *
 *	dma_inv_range(start, end)
 *
 *		Invalidate (discard) the specified virtual address range.
 *		May not write back any entries.  If 'start' or 'end'
 *		are not cache line aligned, those lines must be written
 *		back.
 *		- start  - virtual start address
 *		- end    - virtual end address
 *
 *	dma_clean_range(start, end)
 *
 *		Clean (write back) the specified virtual address range.
 *		- start  - virtual start address
 *		- end    - virtual end address
 *
 *	dma_flush_range(start, end)
 *
 *		Clean and invalidate the specified virtual address range.
@@ -216,8 +201,6 @@ struct cpu_cache_fns {
	void (*dma_map_area)(const void *, size_t, int);
	void (*dma_unmap_area)(const void *, size_t, int);

	void (*dma_inv_range)(const void *, const void *);
	void (*dma_clean_range)(const void *, const void *);
	void (*dma_flush_range)(const void *, const void *);
};

@@ -249,8 +232,6 @@ extern struct cpu_cache_fns cpu_cache;
 */
#define dmac_map_area			cpu_cache.dma_map_area
#define dmac_unmap_area		cpu_cache.dma_unmap_area
#define dmac_inv_range			cpu_cache.dma_inv_range
#define dmac_clean_range		cpu_cache.dma_clean_range
#define dmac_flush_range		cpu_cache.dma_flush_range

#else
@@ -277,14 +258,10 @@ extern void __cpuc_flush_dcache_area(void *, size_t);
 */
#define dmac_map_area			__glue(_CACHE,_dma_map_area)
#define dmac_unmap_area		__glue(_CACHE,_dma_unmap_area)
#define dmac_inv_range			__glue(_CACHE,_dma_inv_range)
#define dmac_clean_range		__glue(_CACHE,_dma_clean_range)
#define dmac_flush_range		__glue(_CACHE,_dma_flush_range)

extern void dmac_map_area(const void *, size_t, int);
extern void dmac_unmap_area(const void *, size_t, int);
extern void dmac_inv_range(const void *, const void *);
extern void dmac_clean_range(const void *, const void *);
extern void dmac_flush_range(const void *, const void *);

#endif
+2 −4
Original line number Diff line number Diff line
@@ -157,7 +157,7 @@ ENTRY(fa_flush_kern_dcache_area)
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(fa_dma_inv_range)
fa_dma_inv_range:
	tst	r0, #CACHE_DLINESIZE - 1
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c14, 1		@ clean & invalidate D entry
@@ -180,7 +180,7 @@ ENTRY(fa_dma_inv_range)
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(fa_dma_clean_range)
fa_dma_clean_range:
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
@@ -241,7 +241,5 @@ ENTRY(fa_cache_fns)
	.long	fa_flush_kern_dcache_area
	.long	fa_dma_map_area
	.long	fa_dma_unmap_area
	.long	fa_dma_inv_range
	.long	fa_dma_clean_range
	.long	fa_dma_flush_range
	.size	fa_cache_fns, . - fa_cache_fns
+1 −28
Original line number Diff line number Diff line
@@ -83,20 +83,6 @@ ENTRY(v3_coherent_user_range)
ENTRY(v3_flush_kern_dcache_area)
	/* FALLTHROUGH */

/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back.
 *
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v3_dma_inv_range)
	/* FALLTHROUGH */

/*
 *	dma_flush_range(start, end)
 *
@@ -108,17 +94,6 @@ ENTRY(v3_dma_inv_range)
ENTRY(v3_dma_flush_range)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c0, 0		@ flush ID cache
	/* FALLTHROUGH */

/*
 *	dma_clean_range(start, end)
 *
 *	Clean (write back) the specified virtual address range.
 *
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v3_dma_clean_range)
	mov	pc, lr

/*
@@ -129,7 +104,7 @@ ENTRY(v3_dma_clean_range)
 */
ENTRY(v3_dma_unmap_area)
	teq	r2, #DMA_TO_DEVICE
	bne	v3_dma_inv_range
	bne	v3_dma_flush_range
	/* FALLTHROUGH */

/*
@@ -155,7 +130,5 @@ ENTRY(v3_cache_fns)
	.long	v3_flush_kern_dcache_area
	.long	v3_dma_map_area
	.long	v3_dma_unmap_area
	.long	v3_dma_inv_range
	.long	v3_dma_clean_range
	.long	v3_dma_flush_range
	.size	v3_cache_fns, . - v3_cache_fns
+1 −28
Original line number Diff line number Diff line
@@ -93,20 +93,6 @@ ENTRY(v4_coherent_user_range)
ENTRY(v4_flush_kern_dcache_area)
	/* FALLTHROUGH */

/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back.
 *
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v4_dma_inv_range)
	/* FALLTHROUGH */

/*
 *	dma_flush_range(start, end)
 *
@@ -120,17 +106,6 @@ ENTRY(v4_dma_flush_range)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7, 0		@ flush ID cache
#endif
	/* FALLTHROUGH */

/*
 *	dma_clean_range(start, end)
 *
 *	Clean (write back) the specified virtual address range.
 *
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v4_dma_clean_range)
	mov	pc, lr

/*
@@ -141,7 +116,7 @@ ENTRY(v4_dma_clean_range)
 */
ENTRY(v4_dma_unmap_area)
	teq	r2, #DMA_TO_DEVICE
	bne	v4_dma_inv_range
	bne	v4_dma_flush_range
	/* FALLTHROUGH */

/*
@@ -167,7 +142,5 @@ ENTRY(v4_cache_fns)
	.long	v4_flush_kern_dcache_area
	.long	v4_dma_map_area
	.long	v4_dma_unmap_area
	.long	v4_dma_inv_range
	.long	v4_dma_clean_range
	.long	v4_dma_flush_range
	.size	v4_cache_fns, . - v4_cache_fns
+2 −4
Original line number Diff line number Diff line
@@ -173,7 +173,7 @@ ENTRY(v4wb_coherent_user_range)
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v4wb_dma_inv_range)
v4wb_dma_inv_range:
	tst	r0, #CACHE_DLINESIZE - 1
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
@@ -194,7 +194,7 @@ ENTRY(v4wb_dma_inv_range)
 *	- start  - virtual start address
 *	- end	 - virtual end address
 */
ENTRY(v4wb_dma_clean_range)
v4wb_dma_clean_range:
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
@@ -252,7 +252,5 @@ ENTRY(v4wb_cache_fns)
	.long	v4wb_flush_kern_dcache_area
	.long	v4wb_dma_map_area
	.long	v4wb_dma_unmap_area
	.long	v4wb_dma_inv_range
	.long	v4wb_dma_clean_range
	.long	v4wb_dma_flush_range
	.size	v4wb_cache_fns, . - v4wb_cache_fns
Loading