arch/arc/Kconfig  +3 −0

@@ -468,6 +468,9 @@ config ARCH_PHYS_ADDR_T_64BIT
 config ARCH_DMA_ADDR_T_64BIT
 	bool
 
+config ARC_PLAT_NEEDS_PHYS_TO_DMA
+	bool
+
 config ARC_CURR_IN_REG
 	bool "Dedicate Register r25 for current_task pointer"
 	default y
arch/arc/include/asm/dma-mapping.h  +7 −0

@@ -11,6 +11,13 @@
 #ifndef ASM_ARC_DMA_MAPPING_H
 #define ASM_ARC_DMA_MAPPING_H
 
+#ifndef CONFIG_ARC_PLAT_NEEDS_PHYS_TO_DMA
+#define plat_dma_to_phys(dev, dma_handle) ((phys_addr_t)(dma_handle))
+#define plat_phys_to_dma(dev, paddr) ((dma_addr_t)(paddr))
+#else
+#include <plat/dma.h>
+#endif
+
 extern struct dma_map_ops arc_dma_ops;
 
 static inline struct dma_map_ops *get_dma_ops(struct device *dev)
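Since the new Kconfig symbol has no prompt, a platform opts in by selecting ARC_PLAT_NEEDS_PHYS_TO_DMA and supplying its own <plat/dma.h> with the two conversion helpers; otherwise the identity macros above apply. Below is a minimal sketch of what such a platform header could look like, assuming a hypothetical fixed offset between CPU physical and bus addresses; the offset value, header guard and comments are illustrative only and not taken from any real platform.

/*
 * Hypothetical <plat/dma.h> for a platform that selects
 * CONFIG_ARC_PLAT_NEEDS_PHYS_TO_DMA. The fixed offset is an assumption
 * for illustration; a real platform would implement whatever translation
 * its bus fabric actually requires.
 */
#ifndef __PLAT_DMA_H
#define __PLAT_DMA_H

#include <linux/types.h>

struct device;

/* assumed constant CPU-phys to bus-address offset (illustrative) */
#define PLAT_DMA_OFFSET		0x80000000UL

static inline dma_addr_t plat_phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return (dma_addr_t)(paddr - PLAT_DMA_OFFSET);
}

static inline phys_addr_t plat_dma_to_phys(struct device *dev, dma_addr_t dma_handle)
{
	return (phys_addr_t)(dma_handle + PLAT_DMA_OFFSET);
}

#endif /* __PLAT_DMA_H */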
arch/arc/mm/dma.c  +4 −5

@@ -60,8 +60,7 @@ static void *arc_dma_alloc(struct device *dev, size_t size,
 	/* This is linear addr (0x8000_0000 based) */
 	paddr = page_to_phys(page);
 
-	/* For now bus address is exactly same as paddr */
-	*dma_handle = paddr;
+	*dma_handle = plat_phys_to_dma(dev, paddr);
 
 	/* This is kernel Virtual address (0x7000_0000 based) */
 	if (need_kvaddr) {
@@ -134,7 +133,7 @@ static dma_addr_t arc_dma_map_page(struct device *dev, struct page *page,
 {
 	phys_addr_t paddr = page_to_phys(page) + offset;
 	_dma_cache_sync(paddr, size, dir);
-	return (dma_addr_t)paddr;
+	return plat_phys_to_dma(dev, paddr);
 }
 
 static int arc_dma_map_sg(struct device *dev, struct scatterlist *sg,
@@ -153,13 +152,13 @@ static int arc_dma_map_sg(struct device *dev, struct scatterlist *sg,
 static void arc_dma_sync_single_for_cpu(struct device *dev,
 		dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
 {
-	_dma_cache_sync(dma_handle, size, DMA_FROM_DEVICE);
+	_dma_cache_sync(plat_dma_to_phys(dev, dma_handle), size, DMA_FROM_DEVICE);
 }
 
 static void arc_dma_sync_single_for_device(struct device *dev,
 		dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
 {
-	_dma_cache_sync(dma_handle, size, DMA_TO_DEVICE);
+	_dma_cache_sync(plat_dma_to_phys(dev, dma_handle), size, DMA_TO_DEVICE);
 }
 
 static void arc_dma_sync_sg_for_cpu(struct device *dev,
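From a driver's point of view nothing changes: the generic DMA API is used exactly as before, and the phys/bus translation now happens inside arc_dma_ops. The sketch below traces the round trip; the function and variable names (example_map, my_dev, buf) are hypothetical and only illustrate where the new hooks take effect.

#include <linux/errno.h>
#include <linux/dma-mapping.h>

/*
 * Hypothetical driver snippet: dma_map_single() ends up in
 * arc_dma_map_page(), which now returns plat_phys_to_dma(paddr);
 * dma_sync_single_for_cpu() ends up in arc_dma_sync_single_for_cpu(),
 * which converts the handle back with plat_dma_to_phys() before doing
 * cache maintenance on the CPU physical address.
 */
static int example_map(struct device *my_dev, void *buf, size_t len)
{
	dma_addr_t handle;

	handle = dma_map_single(my_dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(my_dev, handle))
		return -ENOMEM;

	/* ... device DMAs into the buffer using 'handle' ... */

	dma_sync_single_for_cpu(my_dev, handle, len, DMA_FROM_DEVICE);
	dma_unmap_single(my_dev, handle, len, DMA_FROM_DEVICE);
	return 0;
}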