Diffstat (limited to 'include/asm-powerpc/eeh.h')
-rw-r--r--  include/asm-powerpc/eeh.h | 95
1 file changed, 5 insertions(+), 90 deletions(-)
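
The patch below drops the open-coded, alignment-aware copy loops in eeh_memcpy_fromio() (and removes eeh_memset_io()/eeh_memcpy_toio() along with the EEH_CHECK_ALIGN helper), delegating the copy to _memcpy_fromio() while keeping the EEH tail check: if the last 32-bit word of the copied data is all ones, the slot may be isolated and eeh_check_failure() is invoked. As a reading aid only, here is a minimal standalone C sketch of that retained tail check; it is userspace code, not kernel code, and possible_eeh_error(), check_failure() and copy_fromio_sketch() are illustrative names rather than kernel APIs.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Stand-in for EEH_POSSIBLE_ERROR(val, u32): an all-ones read is a
     * possible sign of an isolated (frozen) PCI slot. */
    static int possible_eeh_error(uint32_t val)
    {
            return val == 0xffffffffu;
    }

    /* Stand-in for eeh_check_failure(); the real helper would go and
     * query firmware about the device behind src. */
    static void check_failure(const void *src, uint32_t val)
    {
            (void)src;
            printf("possible EEH failure: tail word reads 0x%08x\n", val);
    }

    /* Mirror of the post-patch eeh_memcpy_fromio(): do the bulk copy
     * (memcpy stands in for _memcpy_fromio), then inspect only the last
     * four bytes of the destination, as the comment in the patch says. */
    static void copy_fromio_sketch(void *dest, const void *src, unsigned long n)
    {
            memcpy(dest, src, n);

            if (n >= 4) {
                    uint32_t tail;

                    memcpy(&tail, (const char *)dest + n - 4, sizeof(tail));
                    if (possible_eeh_error(tail))
                            check_failure(src, tail);
            }
    }

    int main(void)
    {
            uint8_t io[8] = { 0, 1, 2, 3, 0xff, 0xff, 0xff, 0xff };
            uint8_t buf[8];

            copy_fromio_sketch(buf, io, sizeof(io));
            return 0;
    }

Note that only the trailing word is checked: the assumption, stated in the comment preserved by the patch, is that a failing read makes the tail of the buffer read as 0xff bytes.
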
diff --git a/include/asm-powerpc/eeh.h b/include/asm-powerpc/eeh.h
index 66481bbf270..b886bec6701 100644
--- a/include/asm-powerpc/eeh.h
+++ b/include/asm-powerpc/eeh.h
@@ -169,104 +169,19 @@ static inline u64 eeh_readq_be(const volatile void __iomem *addr)
return val;
}
-#define EEH_CHECK_ALIGN(v,a) \
- ((((unsigned long)(v)) & ((a) - 1)) == 0)
-
-static inline void eeh_memset_io(volatile void __iomem *addr, int c,
- unsigned long n)
-{
- void *p = (void __force *)addr;
- u32 lc = c;
- lc |= lc << 8;
- lc |= lc << 16;
-
- __asm__ __volatile__ ("sync" : : : "memory");
- while(n && !EEH_CHECK_ALIGN(p, 4)) {
- *((volatile u8 *)p) = c;
- p++;
- n--;
- }
- while(n >= 4) {
- *((volatile u32 *)p) = lc;
- p += 4;
- n -= 4;
- }
- while(n) {
- *((volatile u8 *)p) = c;
- p++;
- n--;
- }
- __asm__ __volatile__ ("sync" : : : "memory");
-}
-static inline void eeh_memcpy_fromio(void *dest, const volatile void __iomem *src,
+static inline void eeh_memcpy_fromio(void *dest, const
+ volatile void __iomem *src,
unsigned long n)
{
- void *vsrc = (void __force *) src;
- void *destsave = dest;
- unsigned long nsave = n;
-
- __asm__ __volatile__ ("sync" : : : "memory");
- while(n && (!EEH_CHECK_ALIGN(vsrc, 4) || !EEH_CHECK_ALIGN(dest, 4))) {
- *((u8 *)dest) = *((volatile u8 *)vsrc);
- __asm__ __volatile__ ("eieio" : : : "memory");
- vsrc++;
- dest++;
- n--;
- }
- while(n > 4) {
- *((u32 *)dest) = *((volatile u32 *)vsrc);
- __asm__ __volatile__ ("eieio" : : : "memory");
- vsrc += 4;
- dest += 4;
- n -= 4;
- }
- while(n) {
- *((u8 *)dest) = *((volatile u8 *)vsrc);
- __asm__ __volatile__ ("eieio" : : : "memory");
- vsrc++;
- dest++;
- n--;
- }
- __asm__ __volatile__ ("sync" : : : "memory");
+ _memcpy_fromio(dest, src, n);
/* Look for ffff's here at dest[n]. Assume that at least 4 bytes
* were copied. Check all four bytes.
*/
- if ((nsave >= 4) &&
- (EEH_POSSIBLE_ERROR((*((u32 *) destsave+nsave-4)), u32))) {
- eeh_check_failure(src, (*((u32 *) destsave+nsave-4)));
- }
+ if (n >= 4 && EEH_POSSIBLE_ERROR(*((u32 *)(dest + n - 4)), u32))
+ eeh_check_failure(src, *((u32 *)(dest + n - 4)));
}
-static inline void eeh_memcpy_toio(volatile void __iomem *dest, const void *src,
- unsigned long n)
-{
- void *vdest = (void __force *) dest;
-
- __asm__ __volatile__ ("sync" : : : "memory");
- while(n && (!EEH_CHECK_ALIGN(vdest, 4) || !EEH_CHECK_ALIGN(src, 4))) {
- *((volatile u8 *)vdest) = *((u8 *)src);
- src++;
- vdest++;
- n--;
- }
- while(n > 4) {
- *((volatile u32 *)vdest) = *((volatile u32 *)src);
- src += 4;
- vdest += 4;
- n-=4;
- }
- while(n) {
- *((volatile u8 *)vdest) = *((u8 *)src);
- src++;
- vdest++;
- n--;
- }
- __asm__ __volatile__ ("sync" : : : "memory");
-}
-
-#undef EEH_CHECK_ALIGN
-
/* in-string eeh macros */
static inline void eeh_readsb(const volatile void __iomem *addr, void * buf,
int ns)