author      Benjamin Herrenschmidt <benh@kernel.crashing.org>   2006-11-13 09:27:39 +1100
committer   Paul Mackerras <paulus@samba.org>                   2006-12-04 20:39:05 +1100
commit      68a64357d15ae4f596e92715719071952006e83c (patch)
tree        dee519239225e92169ef77e4fad3be25c4dffe9d /arch/powerpc/kernel/io.c
parent      3d1ea8e8cb4d497a2dd73176cc82095b8f193589 (diff)
[POWERPC] Merge 32 and 64 bits asm-powerpc/io.h
The rework of io.h done for the new hookable accessors made this easier, so I finished the work and merged the 32-bit and 64-bit io.h into asm-powerpc for arch/powerpc. arch/ppc still uses the old version in asm-ppc; there is just too much gunk in there for it to be worth trying to clean up.

Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/kernel/io.c')
-rw-r--r--   arch/powerpc/kernel/io.c   87
1 file changed, 87 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/kernel/io.c b/arch/powerpc/kernel/io.c
index c1aa07524c26..34ae11494ddc 100644
--- a/arch/powerpc/kernel/io.c
+++ b/arch/powerpc/kernel/io.c
@@ -117,3 +117,90 @@ void _outsl_ns(volatile u32 __iomem *port, const void *buf, long count)
asm volatile("sync");
}
EXPORT_SYMBOL(_outsl_ns);
+
+#define IO_CHECK_ALIGN(v,a) ((((unsigned long)(v)) & ((a) - 1)) == 0)
+
+void _memset_io(volatile void __iomem *addr, int c, unsigned long n)
+{
+ void *p = (void __force *)addr;
+ u32 lc = c;
+ lc |= lc << 8;
+ lc |= lc << 16;
+
+ __asm__ __volatile__ ("sync" : : : "memory");
+ while(n && !IO_CHECK_ALIGN(p, 4)) {
+ *((volatile u8 *)p) = c;
+ p++;
+ n--;
+ }
+ while(n >= 4) {
+ *((volatile u32 *)p) = lc;
+ p += 4;
+ n -= 4;
+ }
+ while(n) {
+ *((volatile u8 *)p) = c;
+ p++;
+ n--;
+ }
+ __asm__ __volatile__ ("sync" : : : "memory");
+}
+EXPORT_SYMBOL(_memset_io);
+
+void _memcpy_fromio(void *dest, const volatile void __iomem *src,
+ unsigned long n)
+{
+ void *vsrc = (void __force *) src;
+
+ __asm__ __volatile__ ("sync" : : : "memory");
+ while(n && (!IO_CHECK_ALIGN(vsrc, 4) || !IO_CHECK_ALIGN(dest, 4))) {
+ *((u8 *)dest) = *((volatile u8 *)vsrc);
+ __asm__ __volatile__ ("eieio" : : : "memory");
+ vsrc++;
+ dest++;
+ n--;
+ }
+ while(n > 4) {
+ *((u32 *)dest) = *((volatile u32 *)vsrc);
+ __asm__ __volatile__ ("eieio" : : : "memory");
+ vsrc += 4;
+ dest += 4;
+ n -= 4;
+ }
+ while(n) {
+ *((u8 *)dest) = *((volatile u8 *)vsrc);
+ __asm__ __volatile__ ("eieio" : : : "memory");
+ vsrc++;
+ dest++;
+ n--;
+ }
+ __asm__ __volatile__ ("sync" : : : "memory");
+}
+EXPORT_SYMBOL(_memcpy_fromio);
+
+void _memcpy_toio(volatile void __iomem *dest, const void *src, unsigned long n)
+{
+ void *vdest = (void __force *) dest;
+
+ __asm__ __volatile__ ("sync" : : : "memory");
+ while(n && (!IO_CHECK_ALIGN(vdest, 4) || !IO_CHECK_ALIGN(src, 4))) {
+ *((volatile u8 *)vdest) = *((u8 *)src);
+ src++;
+ vdest++;
+ n--;
+ }
+ while(n > 4) {
+ *((volatile u32 *)vdest) = *((volatile u32 *)src);
+ src += 4;
+ vdest += 4;
+ n-=4;
+ }
+ while(n) {
+ *((volatile u8 *)vdest) = *((u8 *)src);
+ src++;
+ vdest++;
+ n--;
+ }
+ __asm__ __volatile__ ("sync" : : : "memory");
+}
+EXPORT_SYMBOL(_memcpy_toio);
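
The three functions added by this patch are the powerpc back ends for the generic memset_io(), memcpy_fromio() and memcpy_toio() wrappers that drivers call on ioremap()ed MMIO regions. A minimal usage sketch follows; it assumes a hypothetical device whose registers have already been mapped, and the DEMO_* constants and demo_exchange() helper are illustrative only, not part of the patch:

    #include <linux/io.h>

    /* Hypothetical device layout -- not taken from the patch. */
    #define DEMO_BUF_OFFSET 0x100
    #define DEMO_BUF_SIZE   64

    /* regs is the cookie returned by ioremap() for the device's MMIO window. */
    static void demo_exchange(void __iomem *regs, void *shadow)
    {
            /* Clear the on-device buffer; resolves to _memset_io() on powerpc. */
            memset_io(regs + DEMO_BUF_OFFSET, 0, DEMO_BUF_SIZE);

            /* Push a block of normal memory out to the MMIO buffer. */
            memcpy_toio(regs + DEMO_BUF_OFFSET, shadow, DEMO_BUF_SIZE);

            /* Read the MMIO buffer back into normal memory. */
            memcpy_fromio(shadow, regs + DEMO_BUF_OFFSET, DEMO_BUF_SIZE);
    }

As the diff shows, each helper brackets its accesses with "sync" and the copy loops are ordered with "eieio", so callers get ordered MMIO copies without supplying their own barriers.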