arch/sh/kernel/io.c  +67 −0

@@ -61,6 +61,73 @@ void memset_io(volatile void __iomem *dst, int c, unsigned long count)
 }
 EXPORT_SYMBOL(memset_io);
 
+void __raw_readsl(unsigned long addr, void *datap, int len)
+{
+	u32 *data;
+
+	for (data = datap; (len != 0) && (((u32)data & 0x1f) != 0); len--)
+		*data++ = ctrl_inl(addr);
+
+	if (likely(len >= (0x20 >> 2))) {
+		int tmp2, tmp3, tmp4, tmp5, tmp6;
+
+		__asm__ __volatile__(
+			"1:			\n\t"
+			"mov.l	@%7, r0		\n\t"
+			"mov.l	@%7, %2		\n\t"
+#ifdef CONFIG_CPU_SH4
+			"movca.l	r0, @%0	\n\t"
+#else
+			"mov.l	r0, @%0		\n\t"
+#endif
+			"mov.l	@%7, %3		\n\t"
+			"mov.l	@%7, %4		\n\t"
+			"mov.l	@%7, %5		\n\t"
+			"mov.l	@%7, %6		\n\t"
+			"mov.l	@%7, r7		\n\t"
+			"mov.l	@%7, r0		\n\t"
+			"mov.l	%2, @(0x04,%0)	\n\t"
+			"mov	#0x20>>2, %2	\n\t"
+			"mov.l	%3, @(0x08,%0)	\n\t"
+			"sub	%2, %1		\n\t"
+			"mov.l	%4, @(0x0c,%0)	\n\t"
+			"cmp/hi	%1, %2		! T if 32 > len	\n\t"
+			"mov.l	%5, @(0x10,%0)	\n\t"
+			"mov.l	%6, @(0x14,%0)	\n\t"
+			"mov.l	r7, @(0x18,%0)	\n\t"
+			"mov.l	r0, @(0x1c,%0)	\n\t"
+			"bf.s	1b		\n\t"
+			" add	#0x20, %0	\n\t"
+			: "=&r" (data), "=&r" (len),
+			  "=&r" (tmp2), "=&r" (tmp3), "=&r" (tmp4),
+			  "=&r" (tmp5), "=&r" (tmp6)
+			: "r"(addr), "0" (data), "1" (len)
+			: "r0", "r7", "t", "memory");
+	}
+
+	for (; len != 0; len--)
+		*data++ = ctrl_inl(addr);
+}
+EXPORT_SYMBOL(__raw_readsl);
+
+void __raw_writesl(unsigned long addr, const void *data, int len)
+{
+	if (likely(len != 0)) {
+		int tmp1;
+
+		__asm__ __volatile__ (
+			"1:			\n\t"
+			"mov.l	@%0+, %1	\n\t"
+			"dt	%3		\n\t"
+			"bf.s	1b		\n\t"
+			" mov.l	%1, @%4		\n\t"
+			: "=&r" (data), "=&r" (tmp1)
+			: "0" (data), "r" (len), "r"(addr)
+			: "t", "memory");
+	}
+}
+EXPORT_SYMBOL(__raw_writesl);
+
 void __iomem *ioport_map(unsigned long port, unsigned int nr)
 {
 	return sh_mv.mv_ioport_map(port, nr);
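For readers following the assembly, the routines above are behaviourally equivalent to the plain C loops sketched below: each 32-bit word is transferred between a fixed I/O address and an advancing memory buffer. This is a reference-only sketch, not part of the patch; the helper names readsl_ref()/writesl_ref() are made up, ctrl_inl() is the accessor the patch's own head/tail loops already use, and __raw_writel() is taken from include/asm-sh/io.h.

#include <linux/types.h>
#include <asm/io.h>

/* Reference-only equivalents (hypothetical names). The real
 * __raw_readsl()/__raw_writesl() above unroll these loops in SH
 * assembly, moving 32 bytes per iteration and using movca.l on SH-4
 * so the destination cache line is allocated without being fetched. */
static void readsl_ref(unsigned long addr, void *datap, int len)
{
	u32 *data = datap;

	while (len-- > 0)
		*data++ = ctrl_inl(addr);	/* 32-bit read, address stays fixed */
}

static void writesl_ref(unsigned long addr, const void *datap, int len)
{
	const u32 *data = datap;

	while (len-- > 0)
		__raw_writel(*data++, addr);	/* 32-bit write, address stays fixed */
}

Note how __raw_readsl() first copies single words until the buffer pointer is 32-byte aligned (the 0x1f mask), lets the unrolled body handle whole 32-byte blocks, and finishes with a tail loop once fewer than eight words remain.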
include/asm-sh/io.h  +6 −0

@@ -107,6 +107,9 @@
 #define __raw_writew(v, a)	__writew(v, (void __iomem *)(a))
 #define __raw_writel(v, a)	__writel(v, (void __iomem *)(a))
 
+void __raw_writesl(unsigned long addr, const void *data, int longlen);
+void __raw_readsl(unsigned long addr, void *data, int longlen);
+
 /*
  * The platform header files may define some of these macros to use
  * the inlined versions where appropriate. These macros may also be
@@ -132,6 +135,9 @@
 # define writel(v,a)	({ __raw_writel((v),(a)); mb(); })
 #endif
 
+#define writesl	__raw_writesl
+#define readsl	__raw_readsl
+
 #define readb_relaxed(a) readb(a)
 #define readw_relaxed(a) readw(a)
 #define readl_relaxed(a) readl(a)
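With readsl()/writesl() now wired to the new routines, a driver can move a block of longwords to or from a 32-bit data register in a single call; note that in this patch the first argument is an unsigned long address rather than a void __iomem * pointer. The snippet below only illustrates the calling convention; FIFO_REG and fifo_example() are hypothetical and not part of this change.

#include <linux/types.h>
#include <asm/io.h>

#define FIFO_REG	0xa4000000UL	/* hypothetical 32-bit FIFO data register */

static void fifo_example(u32 *rx_buf, const u32 *tx_buf, int words)
{
	/* Drain 'words' longwords from the device FIFO into rx_buf */
	readsl(FIFO_REG, rx_buf, words);

	/* Push 'words' longwords from tx_buf out through the FIFO */
	writesl(FIFO_REG, tx_buf, words);
}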