author		Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
committer	Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
commit		9c28278a24c01c0073fb89e53c1d2a605ab9587d (patch)
tree		e180627880bd5eaa5e1e8965abe583d0ee5ca989 /include
parent		a32f8d8eda8bd49017ac5f88e2b859f1f582557f (diff)
percpu: reorder macros in percpu header files
* In include/asm-generic/percpu.h, collect {raw|_this}_cpu_generic*()
  macros into one place.  They were dispersed through {raw|this}_cpu_*_N()
  definitions and the visual inconsistency was making following the code
  unnecessarily difficult.

* In include/linux/percpu-defs.h, move __verify_pcpu_ptr() later in the
  file so that it's right above the accessor definitions where it's
  actually used.

This is pure reorganization.

Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
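Illustrative note (not part of the patch): the generic macros being gathered here are the fallbacks behind the size-specific this_cpu_*() / raw_cpu_*() accessors on architectures that don't provide optimized *_N() variants. A minimal usage sketch, assuming a hypothetical per-cpu counter named hit_count:

#include <linux/percpu.h>

/* Hypothetical per-cpu counter; the names below are illustrative only. */
static DEFINE_PER_CPU(unsigned long, hit_count);

static void record_hit(void)
{
	/*
	 * Without an arch-optimized this_cpu_add_N(), this expands to
	 * _this_cpu_generic_to_op((pcp), (val), +=), which wraps the
	 * read-modify-write in raw_local_irq_save()/restore().
	 */
	this_cpu_add(hit_count, 1);
}

static unsigned long peek_hits(void)
{
	/* Falls back to _this_cpu_generic_read(): preempt_disable() around the load. */
	return this_cpu_read(hit_count);
}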
Diffstat (limited to 'include')
-rw-r--r--	include/asm-generic/percpu.h	198
-rw-r--r--	include/linux/percpu-defs.h	26
2 files changed, 112 insertions(+), 112 deletions(-)
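Also illustrative (not part of the patch): __verify_pcpu_ptr(), whose relocation inside percpu-defs.h appears in the second hunk below, type-checks its argument without evaluating it. A rough sketch of what it catches, assuming a hypothetical file with one percpu and one ordinary variable; under sparse, the second write is flagged because &plain_cnt is not an __percpu pointer:

#include <linux/percpu.h>

static DEFINE_PER_CPU(int, demo_cnt);	/* genuine __percpu variable */
static int plain_cnt;			/* ordinary variable, not __percpu */

static void demo(void)
{
	this_cpu_write(demo_cnt, 1);	/* ok: passes __verify_pcpu_ptr() */
	this_cpu_write(plain_cnt, 1);	/* sparse warns: address space mismatch */
}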
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 932ce602128f..2300d989087b 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -65,6 +65,105 @@ extern void setup_per_cpu_areas(void);
#define PER_CPU_DEF_ATTRIBUTES
#endif
+#define raw_cpu_generic_to_op(pcp, val, op) \
+do { \
+ *raw_cpu_ptr(&(pcp)) op val; \
+} while (0)
+
+#define raw_cpu_generic_add_return(pcp, val) \
+({ \
+ raw_cpu_add(pcp, val); \
+ raw_cpu_read(pcp); \
+})
+
+#define raw_cpu_generic_xchg(pcp, nval) \
+({ typeof(pcp) ret__; \
+ ret__ = raw_cpu_read(pcp); \
+ raw_cpu_write(pcp, nval); \
+ ret__; \
+})
+
+#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+ typeof(pcp) ret__; \
+ ret__ = raw_cpu_read(pcp); \
+ if (ret__ == (oval)) \
+ raw_cpu_write(pcp, nval); \
+ ret__; \
+})
+
+#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+ int __ret = 0; \
+ if (raw_cpu_read(pcp1) == (oval1) && \
+ raw_cpu_read(pcp2) == (oval2)) { \
+ raw_cpu_write(pcp1, (nval1)); \
+ raw_cpu_write(pcp2, (nval2)); \
+ __ret = 1; \
+ } \
+ (__ret); \
+})
+
+#define _this_cpu_generic_read(pcp) \
+({ typeof(pcp) ret__; \
+ preempt_disable(); \
+ ret__ = *this_cpu_ptr(&(pcp)); \
+ preempt_enable(); \
+ ret__; \
+})
+
+#define _this_cpu_generic_to_op(pcp, val, op) \
+do { \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ *raw_cpu_ptr(&(pcp)) op val; \
+ raw_local_irq_restore(flags); \
+} while (0)
+
+#define _this_cpu_generic_add_return(pcp, val) \
+({ \
+ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ raw_cpu_add(pcp, val); \
+ ret__ = raw_cpu_read(pcp); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+#define _this_cpu_generic_xchg(pcp, nval) \
+({ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_read(pcp); \
+ raw_cpu_write(pcp, nval); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_read(pcp); \
+ if (ret__ == (oval)) \
+ raw_cpu_write(pcp, nval); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+ int ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
+ oval1, oval2, nval1, nval2); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
# ifndef raw_cpu_read_1
# define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
# endif
@@ -78,11 +177,6 @@ extern void setup_per_cpu_areas(void);
# define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
# endif
-#define raw_cpu_generic_to_op(pcp, val, op) \
-do { \
- *raw_cpu_ptr(&(pcp)) op val; \
-} while (0)
-
# ifndef raw_cpu_write_1
# define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
# endif
@@ -135,12 +229,6 @@ do { \
# define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
# endif
-#define raw_cpu_generic_add_return(pcp, val) \
-({ \
- raw_cpu_add(pcp, val); \
- raw_cpu_read(pcp); \
-})
-
# ifndef raw_cpu_add_return_1
# define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
# endif
@@ -154,13 +242,6 @@ do { \
# define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
# endif
-#define raw_cpu_generic_xchg(pcp, nval) \
-({ typeof(pcp) ret__; \
- ret__ = raw_cpu_read(pcp); \
- raw_cpu_write(pcp, nval); \
- ret__; \
-})
-
# ifndef raw_cpu_xchg_1
# define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
# endif
@@ -174,15 +255,6 @@ do { \
# define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
# endif
-#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
-({ \
- typeof(pcp) ret__; \
- ret__ = raw_cpu_read(pcp); \
- if (ret__ == (oval)) \
- raw_cpu_write(pcp, nval); \
- ret__; \
-})
-
# ifndef raw_cpu_cmpxchg_1
# define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
@@ -196,18 +268,6 @@ do { \
# define raw_cpu_cmpxchg_8(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
-#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
-({ \
- int __ret = 0; \
- if (raw_cpu_read(pcp1) == (oval1) && \
- raw_cpu_read(pcp2) == (oval2)) { \
- raw_cpu_write(pcp1, (nval1)); \
- raw_cpu_write(pcp2, (nval2)); \
- __ret = 1; \
- } \
- (__ret); \
-})
-
# ifndef raw_cpu_cmpxchg_double_1
# define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
@@ -225,14 +285,6 @@ do { \
raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
# endif
-#define _this_cpu_generic_read(pcp) \
-({ typeof(pcp) ret__; \
- preempt_disable(); \
- ret__ = *this_cpu_ptr(&(pcp)); \
- preempt_enable(); \
- ret__; \
-})
-
# ifndef this_cpu_read_1
# define this_cpu_read_1(pcp) _this_cpu_generic_read(pcp)
# endif
@@ -246,14 +298,6 @@ do { \
# define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp)
# endif
-#define _this_cpu_generic_to_op(pcp, val, op) \
-do { \
- unsigned long flags; \
- raw_local_irq_save(flags); \
- *raw_cpu_ptr(&(pcp)) op val; \
- raw_local_irq_restore(flags); \
-} while (0)
-
# ifndef this_cpu_write_1
# define this_cpu_write_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
# endif
@@ -306,17 +350,6 @@ do { \
# define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
# endif
-#define _this_cpu_generic_add_return(pcp, val) \
-({ \
- typeof(pcp) ret__; \
- unsigned long flags; \
- raw_local_irq_save(flags); \
- raw_cpu_add(pcp, val); \
- ret__ = raw_cpu_read(pcp); \
- raw_local_irq_restore(flags); \
- ret__; \
-})
-
# ifndef this_cpu_add_return_1
# define this_cpu_add_return_1(pcp, val) _this_cpu_generic_add_return(pcp, val)
# endif
@@ -330,16 +363,6 @@ do { \
# define this_cpu_add_return_8(pcp, val) _this_cpu_generic_add_return(pcp, val)
# endif
-#define _this_cpu_generic_xchg(pcp, nval) \
-({ typeof(pcp) ret__; \
- unsigned long flags; \
- raw_local_irq_save(flags); \
- ret__ = raw_cpu_read(pcp); \
- raw_cpu_write(pcp, nval); \
- raw_local_irq_restore(flags); \
- ret__; \
-})
-
# ifndef this_cpu_xchg_1
# define this_cpu_xchg_1(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
# endif
@@ -353,18 +376,6 @@ do { \
# define this_cpu_xchg_8(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
# endif
-#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
-({ \
- typeof(pcp) ret__; \
- unsigned long flags; \
- raw_local_irq_save(flags); \
- ret__ = raw_cpu_read(pcp); \
- if (ret__ == (oval)) \
- raw_cpu_write(pcp, nval); \
- raw_local_irq_restore(flags); \
- ret__; \
-})
-
# ifndef this_cpu_cmpxchg_1
# define this_cpu_cmpxchg_1(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
@@ -378,17 +389,6 @@ do { \
# define this_cpu_cmpxchg_8(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
# endif
-#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
-({ \
- int ret__; \
- unsigned long flags; \
- raw_local_irq_save(flags); \
- ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
- oval1, oval2, nval1, nval2); \
- raw_local_irq_restore(flags); \
- ret__; \
-})
-
# ifndef this_cpu_cmpxchg_double_1
# define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
diff --git a/include/linux/percpu-defs.h b/include/linux/percpu-defs.h
index 6710eb9555fa..fd0b9ee19ec8 100644
--- a/include/linux/percpu-defs.h
+++ b/include/linux/percpu-defs.h
@@ -53,19 +53,6 @@
__attribute__((section(".discard"), unused))
/*
- * Macro which verifies @ptr is a percpu pointer without evaluating
- * @ptr. This is to be used in percpu accessors to verify that the
- * input parameter is a percpu pointer.
- *
- * + 0 is required in order to convert the pointer type from a
- * potential array type to a pointer to a single item of the array.
- */
-#define __verify_pcpu_ptr(ptr) do { \
- const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL; \
- (void)__vpp_verify; \
-} while (0)
-
-/*
* s390 and alpha modules require percpu variables to be defined as
* weak to force the compiler to generate GOT based external
* references for them. This is necessary because percpu sections
@@ -203,6 +190,19 @@
*/
#ifndef __ASSEMBLY__
+/*
+ * Macro which verifies @ptr is a percpu pointer without evaluating
+ * @ptr. This is to be used in percpu accessors to verify that the
+ * input parameter is a percpu pointer.
+ *
+ * + 0 is required in order to convert the pointer type from a
+ * potential array type to a pointer to a single item of the array.
+ */
+#define __verify_pcpu_ptr(ptr) do { \
+ const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL; \
+ (void)__vpp_verify; \
+} while (0)
+
#ifdef CONFIG_SMP
/*