Diffstat (limited to 'arch/arm/lib/memmove.S')
-rw-r--r--  arch/arm/lib/memmove.S | 14
1 file changed, 3 insertions, 11 deletions
diff --git a/arch/arm/lib/memmove.S b/arch/arm/lib/memmove.S
index ef7fddc14ac..2e301b7bd8f 100644
--- a/arch/arm/lib/memmove.S
+++ b/arch/arm/lib/memmove.S
@@ -13,14 +13,6 @@
 #include <linux/linkage.h>
 #include <asm/assembler.h>
 
-/*
- * This can be used to enable code to cacheline align the source pointer.
- * Experiments on tested architectures (StrongARM and XScale) didn't show
- * this a worthwhile thing to do. That might be different in the future.
- */
-//#define CALGN(code...) code
-#define CALGN(code...)
-
 	.text
 
 /*
@@ -55,11 +47,12 @@ ENTRY(memmove)
 	stmfd sp!, {r5 - r8}
 	blt 5f
 
-	CALGN( ands ip, r1, #31 )
+	CALGN( ands ip, r0, #31 )
 	CALGN( sbcnes r4, ip, r2 )	@ C is always set here
 	CALGN( bcs 2f )
 	CALGN( adr r4, 6f )
 	CALGN( subs r2, r2, ip )	@ C is set here
+	CALGN( rsb ip, ip, #32 )
 	CALGN( add pc, r4, ip )
 
 	PLD( pld [r1, #-4] )
@@ -138,8 +131,7 @@ ENTRY(memmove)
 	subs r2, r2, #28
 	blt 14f
 
-	CALGN( ands ip, r1, #31 )
-	CALGN( rsb ip, ip, #32 )
+	CALGN( ands ip, r0, #31 )
 	CALGN( sbcnes r4, ip, r2 )	@ C is always set here
 	CALGN( subcc r2, r2, ip )
 	CALGN( bcc 15f )
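
Note on the CALGN change above: memmove's backward-copy path first advances both pointers to the ends of the buffers, so "ands ip, r0, #31" measures how far the destination pointer sits above a 32-byte boundary, i.e. how many bytes must be copied before the bulk loop writes to cache-line-aligned destination addresses; the patch switches that test from the source register (r1) to the destination register (r0), and the real code then uses a computed jump ("adr r4, 6f" / "add pc, r4, ip") into an unrolled copy sequence. The C sketch below only illustrates the alignment idea under that reading; it is not the kernel's implementation, and the names align_memmove_backward and CACHE_LINE are invented for the example.

    #include <stddef.h>
    #include <stdint.h>

    /* Assumed 32-byte cache line, matching the #31 masks in the patch. */
    #define CACHE_LINE 32

    /*
     * Conceptual sketch only: a backward copy (valid when dst > src) that
     * peels off (d & 31) head bytes so the bulk loop always starts writing
     * at a cache-line-aligned destination address.
     */
    static void *align_memmove_backward(void *dst, const void *src, size_t n)
    {
        unsigned char *d = (unsigned char *)dst + n;   /* like r0 after "add r0, r0, r2" */
        const unsigned char *s = (const unsigned char *)src + n;
        size_t head = (uintptr_t)d & (CACHE_LINE - 1); /* like "ands ip, r0, #31" */

        if (head > n)
            head = n;
        n -= head;

        /* Copy the first few bytes so d lands on a 32-byte boundary. */
        while (head--)
            *--d = *--s;

        /* Bulk loop: each iteration now begins at an aligned destination. */
        while (n >= CACHE_LINE) {
            for (unsigned i = 0; i < CACHE_LINE; i++)
                *--d = *--s;
            n -= CACHE_LINE;
        }

        /* Remaining bytes. */
        while (n--)
            *--d = *--s;

        return dst;
    }

As in memmove.S itself, which branches to memcpy when the regions do not call for a backward copy, a complete routine would still need a forward path; the sketch covers only the descending case that the patched CALGN block belongs to.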