2006-08-31 07:50:32

by Chris Wright

[permalink] [raw]
Subject: [PATCH] i386: rwlock.h fix smp alternatives fix

The last smp alternatives patch did not actually compile on
x86 with CONFIG_SMP. This fixes the __build_read/write_lock helpers.
I've boot tested on SMP.

Signed-off-by: Chris Wright <[email protected]>
---
include/asm-i386/rwlock.h | 24 +++++++++++-------------
1 file changed, 11 insertions(+), 13 deletions(-)

--- a/include/asm-i386/rwlock.h Thu Aug 31 06:05:15 2006 +0700
+++ b/include/asm-i386/rwlock.h Thu Aug 31 03:23:20 2006 -0400
@@ -21,21 +21,21 @@
#define RW_LOCK_BIAS_STR "0x01000000"

#define __build_read_lock_ptr(rw, helper) \
- asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
+ asm volatile(LOCK_PREFIX " subl $1,(%0)\n\t" \
"jns 1f\n" \
"call " helper "\n\t" \
"1:\n" \
- :"a" (rw) : "memory")
+ ::"a" (rw) : "memory")

#define __build_read_lock_const(rw, helper) \
- asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
+ asm volatile(LOCK_PREFIX " subl $1,%0\n\t" \
"jns 1f\n" \
"pushl %%eax\n\t" \
"leal %0,%%eax\n\t" \
"call " helper "\n\t" \
"popl %%eax\n\t" \
- "1:\n" : \
- "+m" (*(volatile int *)rw) : : "memory")
+ "1:\n" \
+ :"+m" (*(volatile int *)rw) : : "memory")

#define __build_read_lock(rw, helper) do { \
if (__builtin_constant_p(rw)) \
@@ -45,23 +45,21 @@
} while (0)

#define __build_write_lock_ptr(rw, helper) \
- asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+ asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
"jz 1f\n" \
"call " helper "\n\t" \
- "1:\n", \
- "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
- :"a" (rw) : "memory")
+ "1:\n" \
+ ::"a" (rw) : "memory")

#define __build_write_lock_const(rw, helper) \
- asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+ asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
"jz 1f\n" \
"pushl %%eax\n\t" \
"leal %0,%%eax\n\t" \
"call " helper "\n\t" \
"popl %%eax\n\t" \
- "1:\n", \
- "subl $" RW_LOCK_BIAS_STR ",%0\n\t", \
- "+m" (*(volatile int *)rw) : : "memory")
+ "1:\n" \
+ :"+m" (*(volatile int *)rw) : : "memory")

#define __build_write_lock(rw, helper) do { \
if (__builtin_constant_p(rw)) \


2006-08-31 08:11:52

by Andi Kleen

[permalink] [raw]
Subject: Re: [PATCH] i386: rwlock.h fix smp alternatives fix

On Thursday 31 August 2006 09:53, Chris Wright wrote:
> The last smp alternatives patch did not actually compile on
> x86 with CONFIG_SMP. This fixes the __build_read/write_lock helpers.
> I've boot tested on SMP.

Oops, I think that was an unrefreshed quilt patch. Sorry. I fixed
those before testing, but then still sent out the old patch.

-Andi

Here's the patch as intended for reference :/ Or Chris' incremental
is fine.

i386: Remove alternative_smp

The .fill causes miscompilations with some binutils version.

Instead just patch the lock prefix in the lock constructs. That is the
majority of the cost and should be good enough.

Cc: [email protected]

Signed-off-by: Andi Kleen <[email protected]>

---
include/asm-i386/alternative.h | 20 --------------------
include/asm-i386/rwlock.h | 14 ++++++--------
include/asm-i386/spinlock.h | 19 ++++++-------------
3 files changed, 12 insertions(+), 41 deletions(-)

Index: linux/include/asm-i386/alternative.h
===================================================================
--- linux.orig/include/asm-i386/alternative.h
+++ linux/include/asm-i386/alternative.h
@@ -88,9 +88,6 @@ static inline void alternatives_smp_swit
/*
* Alternative inline assembly for SMP.
*
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
* The LOCK_PREFIX macro defined here replaces the LOCK and
* LOCK_PREFIX macros used everywhere in the source tree.
*
@@ -110,21 +107,6 @@ static inline void alternatives_smp_swit
*/

#ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...) \
- asm volatile ("661:\n\t" smpinstr "\n662:\n" \
- ".section .smp_altinstructions,\"a\"\n" \
- " .align 4\n" \
- " .long 661b\n" /* label */ \
- " .long 663f\n" /* new instruction */ \
- " .byte " __stringify(X86_FEATURE_UP) "\n" \
- " .byte 662b-661b\n" /* sourcelen */ \
- " .byte 664f-663f\n" /* replacementlen */ \
- ".previous\n" \
- ".section .smp_altinstr_replacement,\"awx\"\n" \
- "663:\n\t" upinstr "\n" /* replacement */ \
- "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
- ".previous" : args)
-
#define LOCK_PREFIX \
".section .smp_locks,\"a\"\n" \
" .align 4\n" \
@@ -133,8 +115,6 @@ static inline void alternatives_smp_swit
"661:\n\tlock; "

#else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
- asm volatile (upinstr : args)
#define LOCK_PREFIX ""
#endif

Index: linux/include/asm-i386/rwlock.h
===================================================================
--- linux.orig/include/asm-i386/rwlock.h
+++ linux/include/asm-i386/rwlock.h
@@ -21,19 +21,17 @@
#define RW_LOCK_BIAS_STR "0x01000000"

#define __build_read_lock(rw, helper) \
- alternative_smp("lock; subl $1,(%0)\n\t" \
+ asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
"jns 1f\n" \
"call " helper "\n\t" \
- "1:\n", \
- "subl $1,(%0)\n\t", \
- :"a" (rw) : "memory")
+ "1:\n" \
+ ::"a" (rw) : "memory")

#define __build_write_lock(rw, helper) \
- alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+ asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
"jz 1f\n" \
"call " helper "\n\t" \
- "1:\n", \
- "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
- :"a" (rw) : "memory")
+ "1:\n" \
+ ::"a" (rw) : "memory")

#endif
Index: linux/include/asm-i386/spinlock.h
===================================================================
--- linux.orig/include/asm-i386/spinlock.h
+++ linux/include/asm-i386/spinlock.h
@@ -22,7 +22,7 @@

#define __raw_spin_lock_string \
"\n1:\t" \
- "lock ; decb %0\n\t" \
+ LOCK_PREFIX " ; decb %0\n\t" \
"jns 3f\n" \
"2:\t" \
"rep;nop\n\t" \
@@ -38,7 +38,7 @@
*/
#define __raw_spin_lock_string_flags \
"\n1:\t" \
- "lock ; decb %0\n\t" \
+ LOCK_PREFIX " ; decb %0\n\t" \
"jns 5f\n" \
"2:\t" \
"testl $0x200, %1\n\t" \
@@ -57,15 +57,9 @@
"jmp 4b\n" \
"5:\n\t"

-#define __raw_spin_lock_string_up \
- "\n\tdecb %0"
-
static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
- alternative_smp(
- __raw_spin_lock_string,
- __raw_spin_lock_string_up,
- "+m" (lock->slock) : : "memory");
+ asm volatile(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
}

/*
@@ -76,10 +70,9 @@ static inline void __raw_spin_lock(raw_s
#ifndef CONFIG_PROVE_LOCKING
static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
{
- alternative_smp(
- __raw_spin_lock_string_flags,
- __raw_spin_lock_string_up,
- "+m" (lock->slock) : "r" (flags) : "memory");
+ asm volatile(
+ __raw_spin_lock_string_flags
+ : "+m" (lock->slock) : "r" (flags) : "memory");
}
#endif




2006-08-31 16:25:25

by Chuck Ebbert

[permalink] [raw]
Subject: Re: [PATCH] i386: rwlock.h fix smp alternatives fix

In-Reply-To: <[email protected]>

On Thu, 31 Aug 2006 10:11:45 +0200, Andi Kleen wrote:

> Here's the patch as intended for reference :/ Or Chris' incremental
> is fine.
>
> i386: Remove alternative_smp
>
> The .fill causes miscompilations with some binutils version.

Has the dust settled enough to prepare a patch for -stable now?

--
Chuck

2006-08-31 18:01:52

by Chris Wright

[permalink] [raw]
Subject: Re: [PATCH] i386: rwlock.h fix smp alternatives fix

* Chuck Ebbert ([email protected]) wrote:
> In-Reply-To: <[email protected]>
>
> On Thu, 31 Aug 2006 10:11:45 +0200, Andi Kleen wrote:
>
> > Here's the patch as intended for reference :/ Or Chris' incremental
> > is fine.
> >
> > i386: Remove alternative_smp
> >
> > The .fill causes miscompilations with some binutils version.
>
> Has the dust settled enough to prepare a patch for -stable now?

Seems so, although Linus hasn't fixed upstream yet. The binutils
issues look worrisome enough, miscompilation should certainly be fixed
in -stable. And we should get both i386 and the x86_64 fix as well.
I'll add those unless Andi objects.

thanks,
-chris