author     Stephen Rothwell <sfr@canb.auug.org.au>    2005-11-10 15:51:14 +1100
committer  Stephen Rothwell <sfr@canb.auug.org.au>    2005-11-10 15:51:14 +1100
commit     06a98dba0d4b4f2f9b1f35f636beb166d6cbde34 (patch)
tree       5fabe4727384d971c9e83f93b27254d1a9343c18
parent     676e2497f87501fb190a5b06103d396ca9147373 (diff)
powerpc: implement atomic64_t on ppc64
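
For reference: every operation added here is built on a ldarx/stdcx.
(load doubleword and reserve / store doubleword conditional) retry
loop. A minimal sketch of the equivalent semantics in portable C,
using the GCC __atomic builtins instead of the patch's inline
assembly (the sketch_* names are illustrative only, not part of this
patch):

    /* Illustration only: roughly what atomic64_add() and
     * atomic64_add_return() provide.  The patch itself uses
     * ldarx/stdcx. loops; __ATOMIC_SEQ_CST is a conservative
     * stand-in for its eieio/isync barrier pair. */
    static inline void sketch_atomic64_add(long a, volatile long *counter)
    {
            __atomic_fetch_add(counter, a, __ATOMIC_RELAXED);
    }

    static inline long sketch_atomic64_add_return(long a, volatile long *counter)
    {
            return __atomic_add_fetch(counter, a, __ATOMIC_SEQ_CST);
    }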
Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
-rw-r--r--  include/asm-powerpc/atomic.h  |  178
1 file changed, 178 insertions(+), 0 deletions(-)
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index c5b12fd2b46..9c0b372a46e 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -197,5 +197,183 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
+#ifdef __powerpc64__
+
+typedef struct { volatile long counter; } atomic64_t;
+
+#define ATOMIC64_INIT(i) { (i) }
+
+#define atomic64_read(v) ((v)->counter)
+#define atomic64_set(v,i) (((v)->counter) = (i))
+
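+/*
+ * All of the routines below loop on a ldarx (load doubleword and
+ * reserve) / stdcx. (store doubleword conditional) pair: the store
+ * only succeeds if no other CPU has written the doubleword since the
+ * load, otherwise bne- branches back and the update is retried.
+ */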
+static __inline__ void atomic64_add(long a, atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+"1: ldarx %0,0,%3 # atomic64_add\n\
+ add %0,%2,%0\n\
+ stdcx. %0,0,%3 \n\
+ bne- 1b"
+ : "=&r" (t), "=m" (v->counter)
+ : "r" (a), "r" (&v->counter), "m" (v->counter)
+ : "cc");
+}
+
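+/*
+ * The value-returning variants (atomic64_add_return() and friends)
+ * are also bracketed with EIEIO_ON_SMP/ISYNC_ON_SMP so that, on SMP,
+ * they behave as memory barriers, like the 32-bit atomic_*_return()
+ * versions earlier in this file.
+ */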
+static __inline__ long atomic64_add_return(long a, atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+ EIEIO_ON_SMP
+"1: ldarx %0,0,%2 # atomic64_add_return\n\
+ add %0,%1,%0\n\
+ stdcx. %0,0,%2 \n\
+ bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (t)
+ : "r" (a), "r" (&v->counter)
+ : "cc", "memory");
+
+ return t;
+}
+
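+/*
+ * atomic64_add_negative - add and test if negative
+ * @a: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @a to @v and returns true if the result is
+ * negative, or false when the result is greater than or equal to
+ * zero.
+ */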
+#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
+
+static __inline__ void atomic64_sub(long a, atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+"1: ldarx %0,0,%3 # atomic64_sub\n\
+ subf %0,%2,%0\n\
+ stdcx. %0,0,%3 \n\
+ bne- 1b"
+ : "=&r" (t), "=m" (v->counter)
+ : "r" (a), "r" (&v->counter), "m" (v->counter)
+ : "cc");
+}
+
+static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+ EIEIO_ON_SMP
+"1: ldarx %0,0,%2 # atomic64_sub_return\n\
+ subf %0,%1,%0\n\
+ stdcx. %0,0,%2 \n\
+ bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (t)
+ : "r" (a), "r" (&v->counter)
+ : "cc", "memory");
+
+ return t;
+}
+
+static __inline__ void atomic64_inc(atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+"1: ldarx %0,0,%2 # atomic64_inc\n\
+ addic %0,%0,1\n\
+ stdcx. %0,0,%2 \n\
+ bne- 1b"
+ : "=&r" (t), "=m" (v->counter)
+ : "r" (&v->counter), "m" (v->counter)
+ : "cc");
+}
+
+static __inline__ long atomic64_inc_return(atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+ EIEIO_ON_SMP
+"1: ldarx %0,0,%1 # atomic64_inc_return\n\
+ addic %0,%0,1\n\
+ stdcx. %0,0,%1 \n\
+ bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (t)
+ : "r" (&v->counter)
+ : "cc", "memory");
+
+ return t;
+}
+
+/*
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+
+static __inline__ void atomic64_dec(atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+"1: ldarx %0,0,%2 # atomic64_dec\n\
+ addic %0,%0,-1\n\
+ stdcx. %0,0,%2\n\
+ bne- 1b"
+ : "=&r" (t), "=m" (v->counter)
+ : "r" (&v->counter), "m" (v->counter)
+ : "cc");
+}
+
+static __inline__ long atomic64_dec_return(atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+ EIEIO_ON_SMP
+"1: ldarx %0,0,%1 # atomic64_dec_return\n\
+ addic %0,%0,-1\n\
+ stdcx. %0,0,%1\n\
+ bne- 1b"
+ ISYNC_ON_SMP
+ : "=&r" (t)
+ : "r" (&v->counter)
+ : "cc", "memory");
+
+ return t;
+}
+
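+/*
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @a: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @a from @v and returns true if the result is
+ * zero, or false for all other cases.  atomic64_dec_and_test() is
+ * the decrement-by-one analogue.
+ */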
+#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
+#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
+
+/*
+ * Atomically test *v and decrement if it is greater than 0.
+ * The function returns the old value of *v minus 1.
+ */
+static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
+{
+ long t;
+
+ __asm__ __volatile__(
+ EIEIO_ON_SMP
+"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
+ addic. %0,%0,-1\n\
+ blt- 2f\n\
+ stdcx. %0,0,%1\n\
+ bne- 1b"
+ ISYNC_ON_SMP
+ "\n\
+2:" : "=&r" (t)
+ : "r" (&v->counter)
+ : "cc", "memory");
+
+ return t;
+}
+
+#endif /* __powerpc64__ */
+
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */
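
As a usage illustration (not part of the patch), a simple 64-bit
reference count could be layered on the new API; struct kref64 and
its helpers below are hypothetical names:

    /* Hypothetical example: a 64-bit reference count on atomic64_t. */
    struct kref64 {
            atomic64_t refcount;
    };

    static inline void kref64_init(struct kref64 *k)
    {
            atomic64_set(&k->refcount, 1);  /* start with one reference */
    }

    static inline void kref64_get(struct kref64 *k)
    {
            atomic64_inc(&k->refcount);
    }

    /* Returns non-zero when the last reference has been dropped. */
    static inline int kref64_put(struct kref64 *k)
    {
            return atomic64_dec_and_test(&k->refcount);
    }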