author    Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>    2007-05-08 00:34:27 -0700
committer Linus Torvalds <torvalds@woody.linux-foundation.org>    2007-05-08 11:15:19 -0700
commitf46e477ed94f6407982690ef53dab7898834268f (patch)
tree   40b53e2bfab3f532fcd490a5aad54b39deef7a51
parent 8ffe9d0bffa441de41d8543a984e552d49293641 (diff)
atomic.h: add atomic64 cmpxchg, xchg and add_unless to powerpc
[akpm@linux-foundation.org: build fixes]
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Paul Mackerras <paulus@samba.org>
Cc: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
-rw-r--r--  include/asm-powerpc/atomic.h  |   6
-rw-r--r--  include/asm-powerpc/bitops.h  |   1
-rw-r--r--  include/asm-powerpc/system.h  | 130
3 files changed, 131 insertions, 6 deletions
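
Before the diff itself, a minimal usage sketch of what this commit enables on 64-bit powerpc; the function and variable names below are hypothetical, not part of the patch. atomic64_cmpxchg() returns the value the counter held before the attempt, so callers typically loop until their update wins:

#include <asm/atomic.h>

static atomic64_t next_id = ATOMIC64_INIT(0);

/* Raise next_id to at least 'floor'; return the value now in effect. */
static long raise_to_floor(long floor)
{
	long cur = atomic64_read(&next_id);

	while (cur < floor) {
		long old = atomic64_cmpxchg(&next_id, cur, floor);
		if (old == cur)
			return floor;	/* our store landed */
		cur = old;		/* lost the race; go around again */
	}
	return cur;
}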
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index 2ce4b6b7b34..438a7fcfba5 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -165,8 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
return t;
}
-#define atomic_cmpxchg(v, o, n) \
- ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
@@ -414,8 +413,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
return t;
}
-#define atomic64_cmpxchg(v, o, n) \
- ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
/**
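
A note on the two-line change above: dropping the outer cast looks safe because the cmpxchg() macro in include/asm-powerpc/system.h (further down in this diff) already casts its result to __typeof__(*(ptr)), so atomic_cmpxchg() still evaluates to the counter's type. A hypothetical caller illustrating the typing:

/* atomic_cmpxchg() still yields int here: inside the cmpxchg() macro,
 * the result is cast to __typeof__((v)->counter). */
static int try_upgrade(atomic_t *v)
{
	int old = atomic_cmpxchg(v, 1, 2);

	return old == 1;	/* true if we moved v from 1 to 2 */
}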
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 8f757f6246e..8144a2788db 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -39,7 +39,6 @@
#ifdef __KERNEL__
#include <linux/compiler.h>
-#include <asm/atomic.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index d3e0906ff2b..77bf5873a01 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -7,7 +7,6 @@
#include <linux/kernel.h>
#include <asm/hw_irq.h>
-#include <asm/atomic.h>
/*
* Memory barrier.
@@ -227,6 +226,29 @@ __xchg_u32(volatile void *p, unsigned long val)
return prev;
}
+/*
+ * Atomic exchange
+ *
+ * Changes the memory location '*ptr' to be val and returns
+ * the previous value stored there.
+ */
+static __inline__ unsigned long
+__xchg_u32_local(volatile void *p, unsigned long val)
+{
+ unsigned long prev;
+
+ __asm__ __volatile__(
+"1: lwarx %0,0,%2 \n"
+ PPC405_ERR77(0,%2)
+" stwcx. %3,0,%2 \n\
+ bne- 1b"
+ : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+ : "r" (p), "r" (val)
+ : "cc", "memory");
+
+ return prev;
+}
+
#ifdef CONFIG_PPC64
static __inline__ unsigned long
__xchg_u64(volatile void *p, unsigned long val)
@@ -246,6 +268,23 @@ __xchg_u64(volatile void *p, unsigned long val)
return prev;
}
+
+static __inline__ unsigned long
+__xchg_u64_local(volatile void *p, unsigned long val)
+{
+ unsigned long prev;
+
+ __asm__ __volatile__(
+"1: ldarx %0,0,%2 \n"
+ PPC405_ERR77(0,%2)
+" stdcx. %3,0,%2 \n\
+ bne- 1b"
+ : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+ : "r" (p), "r" (val)
+ : "cc", "memory");
+
+ return prev;
+}
#endif
/*
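
As a reading aid for the lwarx/stwcx. (and ldarx/stdcx.) loops above: they are PowerPC's load-reserve/store-conditional pair, retrying via "bne- 1b" whenever another writer clears the reservation. A rough user-space analogue using a GCC builtin, purely illustrative and not the kernel code:

/* Rough analogue of __xchg_u32_local(): swap in 'val' and return the
 * old value.  A compare-and-swap loop stands in for the reservation,
 * which portable C cannot express directly. */
static unsigned int xchg_u32_analogue(volatile unsigned int *p,
				      unsigned int val)
{
	unsigned int prev = *p;

	while (!__sync_bool_compare_and_swap(p, prev, val))
		prev = *p;
	return prev;
}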
@@ -269,12 +308,33 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
return x;
}
+static __inline__ unsigned long
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+{
+ switch (size) {
+ case 4:
+ return __xchg_u32_local(ptr, x);
+#ifdef CONFIG_PPC64
+ case 8:
+ return __xchg_u64_local(ptr, x);
+#endif
+ }
+ __xchg_called_with_bad_pointer();
+ return x;
+}
#define xchg(ptr,x) \
({ \
__typeof__(*(ptr)) _x_ = (x); \
(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
})
+#define xchg_local(ptr,x) \
+ ({ \
+ __typeof__(*(ptr)) _x_ = (x); \
+ (__typeof__(*(ptr))) __xchg_local((ptr), \
+ (unsigned long)_x_, sizeof(*(ptr))); \
+ })
+
#define tas(ptr) (xchg((ptr),1))
/*
@@ -306,6 +366,28 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
return prev;
}
+static __inline__ unsigned long
+__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
+ unsigned long new)
+{
+ unsigned int prev;
+
+ __asm__ __volatile__ (
+"1: lwarx %0,0,%2 # __cmpxchg_u32\n\
+ cmpw 0,%0,%3\n\
+ bne- 2f\n"
+ PPC405_ERR77(0,%2)
+" stwcx. %4,0,%2\n\
+ bne- 1b"
+ "\n\
+2:"
+ : "=&r" (prev), "+m" (*p)
+ : "r" (p), "r" (old), "r" (new)
+ : "cc", "memory");
+
+ return prev;
+}
+
#ifdef CONFIG_PPC64
static __inline__ unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
@@ -328,6 +410,27 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
return prev;
}
+
+static __inline__ unsigned long
+__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
+ unsigned long new)
+{
+ unsigned long prev;
+
+ __asm__ __volatile__ (
+"1: ldarx %0,0,%2 # __cmpxchg_u64\n\
+ cmpd 0,%0,%3\n\
+ bne- 2f\n\
+ stdcx. %4,0,%2\n\
+ bne- 1b"
+ "\n\
+2:"
+ : "=&r" (prev), "+m" (*p)
+ : "r" (p), "r" (old), "r" (new)
+ : "cc", "memory");
+
+ return prev;
+}
#endif
/* This function doesn't exist, so you'll get a linker error
@@ -350,6 +453,22 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
return old;
}
+static __inline__ unsigned long
+__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
+ unsigned int size)
+{
+ switch (size) {
+ case 4:
+ return __cmpxchg_u32_local(ptr, old, new);
+#ifdef CONFIG_PPC64
+ case 8:
+ return __cmpxchg_u64_local(ptr, old, new);
+#endif
+ }
+ __cmpxchg_called_with_bad_pointer();
+ return old;
+}
+
#define cmpxchg(ptr,o,n) \
({ \
__typeof__(*(ptr)) _o_ = (o); \
@@ -358,6 +477,15 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
(unsigned long)_n_, sizeof(*(ptr))); \
})
+
+#define cmpxchg_local(ptr,o,n) \
+ ({ \
+ __typeof__(*(ptr)) _o_ = (o); \
+ __typeof__(*(ptr)) _n_ = (n); \
+ (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
+ (unsigned long)_n_, sizeof(*(ptr))); \
+ })
+
#ifdef CONFIG_PPC64
/*
* We handle most unaligned accesses in hardware. On the other hand
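
Closing note on intent: the new xchg_local()/cmpxchg_local() variants perform the same ll/sc update as xchg()/cmpxchg() but omit the SMP memory barriers, so they guarantee atomicity without cross-CPU ordering. The natural use is CPU-local data, e.g. a per-CPU counter touched from both process context and interrupts on the same CPU. A hedged sketch, with hypothetical names not taken from the patch:

#include <linux/percpu.h>
#include <asm/system.h>

static DEFINE_PER_CPU(unsigned long, my_counter);

/* Zero this CPU's counter and return the value it held.  xchg_local()
 * stays atomic against interrupts on this CPU but carries no barrier,
 * so no other CPU may rely on ordering against this store. */
static unsigned long reset_my_counter(void)
{
	unsigned long old = xchg_local(&get_cpu_var(my_counter), 0);

	put_cpu_var(my_counter);
	return old;
}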