author		Nick Piggin <npiggin@suse.de>	2007-10-18 03:06:53 -0700
committer	Linus Torvalds <torvalds@woody.linux-foundation.org>	2007-10-18 14:37:29 -0700
commit		66ffb04ca5ec43ca763e8a9802ca3244f5519ae1 (patch)
tree		26df9afbce84c5f478044de16f90a80fc182bd61 /include/asm-powerpc/bitops.h
parent		728697cd6b3c8c12d0ad5ebdce5616ef5d25bf18 (diff)
powerpc: lock bitops
Add non-trivial lock bitops implementation for powerpc.

Signed-off-by: Nick Piggin <npiggin@suse.de>
Acked-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Cc: Paul Mackerras <paulus@samba.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-powerpc/bitops.h')
-rw-r--r--	include/asm-powerpc/bitops.h	46
1 file changed, 45 insertions(+), 1 deletion(-)
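
The new ops are meant to be used in matched pairs: test_and_set_bit_lock() atomically sets the bit and returns its old value with acquire ordering (the ISYNC_ON_SMP after the store-conditional), while clear_bit_unlock() clears it with release ordering (the LWSYNC_ON_SMP before the reservation loop). The following is a minimal sketch of such a pairing, not part of the patch: the example_bit_lock()/example_bit_unlock() names are invented for illustration, and cpu_relax() is assumed to come from <asm/processor.h>.

#include <linux/bitops.h>	/* test_and_set_bit_lock(), clear_bit_unlock() */
#include <asm/processor.h>	/* cpu_relax() */

/* Spin until we observe the bit clear and manage to set it ourselves. */
static inline void example_bit_lock(int nr, volatile unsigned long *word)
{
	while (test_and_set_bit_lock(nr, word))
		cpu_relax();
}

/* Release: stores made while holding the lock are ordered before the clear. */
static inline void example_bit_unlock(int nr, volatile unsigned long *word)
{
	clear_bit_unlock(nr, word);
}

This mirrors how the generic bit_spin_lock()/bit_spin_unlock() helpers use these primitives; __clear_bit_unlock() additionally covers unlock paths where the word is only modified under the lock, so a release barrier plus a non-atomic clear is enough.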
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 1d4c16613d2..e85c3e078ba 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -86,6 +86,24 @@ static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
: "cc" );
}
+static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+ unsigned long old;
+ unsigned long mask = BITOP_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+ __asm__ __volatile__(
+ LWSYNC_ON_SMP
+"1:" PPC_LLARX "%0,0,%3 # clear_bit_unlock\n"
+ "andc %0,%0,%2\n"
+ PPC405_ERR77(0,%3)
+ PPC_STLCX "%0,0,%3\n"
+ "bne- 1b"
+ : "=&r" (old), "+m" (*p)
+ : "r" (mask), "r" (p)
+ : "cc", "memory");
+}
+
static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
unsigned long old;
@@ -125,6 +143,27 @@ static __inline__ int test_and_set_bit(unsigned long nr,
return (old & mask) != 0;
}
+static __inline__ int test_and_set_bit_lock(unsigned long nr,
+ volatile unsigned long *addr)
+{
+ unsigned long old, t;
+ unsigned long mask = BITOP_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+ __asm__ __volatile__(
+"1:" PPC_LLARX "%0,0,%3 # test_and_set_bit_lock\n"
+ "or %1,%0,%2 \n"
+ PPC405_ERR77(0,%3)
+ PPC_STLCX "%1,0,%3 \n"
+ "bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (old), "=&r" (t)
+ : "r" (mask), "r" (p)
+ : "cc", "memory");
+
+ return (old & mask) != 0;
+}
+
static __inline__ int test_and_clear_bit(unsigned long nr,
volatile unsigned long *addr)
{
@@ -185,6 +224,12 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
#include <asm-generic/bitops/non-atomic.h>
+static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+ __asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
+ __clear_bit(nr, addr);
+}
+
/*
* Return the zero-based bit position (LE, not IBM bit numbering) of
* the most significant 1-bit in a double word.
@@ -266,7 +311,6 @@ static __inline__ int fls(unsigned int x)
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
-#include <asm-generic/bitops/lock.h>
#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
unsigned long find_next_zero_bit(const unsigned long *addr,