2009-12-19 00:27:59

by Christoph Lameter

[permalink] [raw]
Subject: [this_cpu_xx V8 12/16] Add percpu cmpxchg operations

These are needed for the ring buffer logic.

Signed-off-by: Christoph Lameter <[email protected]>

---
arch/x86/include/asm/percpu.h | 12 ++++++++++++
1 file changed, 12 insertions(+)

Index: linux-2.6/arch/x86/include/asm/percpu.h
===================================================================
--- linux-2.6.orig/arch/x86/include/asm/percpu.h 2009-12-18 15:00:53.000000000 -0600
+++ linux-2.6/arch/x86/include/asm/percpu.h 2009-12-18 15:06:09.000000000 -0600
@@ -201,6 +201,9 @@ do { \
#define __this_cpu_xor_1(pcp, val) percpu_to_op("xor", (pcp), val)
#define __this_cpu_xor_2(pcp, val) percpu_to_op("xor", (pcp), val)
#define __this_cpu_xor_4(pcp, val) percpu_to_op("xor", (pcp), val)
+#define __this_cpu_cmpxchg_1(pcp, old,new) cmpxchg_local(__this_cpu_ptr(pcp), old, new)
+#define __this_cpu_cmpxchg_2(pcp, old,new) cmpxchg_local(__this_cpu_ptr(pcp), old, new)
+#define __this_cpu_cmpxchg_4(pcp, old,new) cmpxchg_local(__this_cpu_ptr(pcp), old, new)

#define this_cpu_read_1(pcp) percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_read_2(pcp) percpu_from_op("mov", (pcp), "m"(pcp))
@@ -226,6 +229,9 @@ do { \
#define this_cpu_xor_1(pcp, val) percpu_to_op("xor", (pcp), val)
#define this_cpu_xor_2(pcp, val) percpu_to_op("xor", (pcp), val)
#define this_cpu_xor_4(pcp, val) percpu_to_op("xor", (pcp), val)
+#define this_cpu_cmpxchg_1(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)
+#define this_cpu_cmpxchg_2(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)
+#define this_cpu_cmpxchg_4(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)

#define irqsafe_cpu_add_1(pcp, val) percpu_to_op("add", (pcp), val)
#define irqsafe_cpu_add_2(pcp, val) percpu_to_op("add", (pcp), val)
@@ -245,6 +251,9 @@ do { \
#define irqsafe_cpu_xor_1(pcp, val) percpu_to_op("xor", (pcp), val)
#define irqsafe_cpu_xor_2(pcp, val) percpu_to_op("xor", (pcp), val)
#define irqsafe_cpu_xor_4(pcp, val) percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_cmpxchg_1(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)
+#define irqsafe_cpu_cmpxchg_2(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)
+#define irqsafe_cpu_cmpxchg_4(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)

/*
* Per cpu atomic 64 bit operations are only available under 64 bit.
@@ -259,6 +268,7 @@ do { \
#define __this_cpu_and_8(pcp, val) percpu_to_op("and", (pcp), val)
#define __this_cpu_or_8(pcp, val) percpu_to_op("or", (pcp), val)
#define __this_cpu_xor_8(pcp, val) percpu_to_op("xor", (pcp), val)
+#define __this_cpu_cmpxchg_8(pcp, old,new) cmpxchg_local(__this_cpu_ptr(pcp), old, new)

#define this_cpu_read_8(pcp) percpu_from_op("mov", (pcp), "m"(pcp))
#define this_cpu_write_8(pcp, val) percpu_to_op("mov", (pcp), val)
@@ -268,6 +278,7 @@ do { \
#define this_cpu_and_8(pcp, val) percpu_to_op("and", (pcp), val)
#define this_cpu_or_8(pcp, val) percpu_to_op("or", (pcp), val)
#define this_cpu_xor_8(pcp, val) percpu_to_op("xor", (pcp), val)
+#define this_cpu_cmpxchg_8(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)

#define irqsafe_cpu_add_8(pcp, val) percpu_to_op("add", (pcp), val)
#define irqsafe_cpu_inc_8(pcp) percpu_var_op("inc", (pcp))
@@ -275,6 +286,7 @@ do { \
#define irqsafe_cpu_and_8(pcp, val) percpu_to_op("and", (pcp), val)
#define irqsafe_cpu_or_8(pcp, val) percpu_to_op("or", (pcp), val)
#define irqsafe_cpu_xor_8(pcp, val) percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_cmpxchg_8(pcp, old,new) cmpxchg_local(this_cpu_ptr(pcp), old, new)

#endif


--