[PATCH] i386: rwlock.h fix smp alternatives fix
author: Chris Wright <chrisw@sous-sol.org>
Thu, 31 Aug 2006 07:53:22 +0000 (00:53 -0700)
committer: Linus Torvalds <torvalds@g5.osdl.org>
Thu, 31 Aug 2006 17:46:07 +0000 (10:46 -0700)
Commit 8c74932779fc6f61b4c30145863a17125c1a296c ("i386: Remove
alternative_smp") did not actually compile on x86 with CONFIG_SMP.

This fixes the __build_read/write_lock helpers.  I've boot tested on
SMP.

[ Andi: "Oops, I think that was a quilt unrefreshed patch.  Sorry.  I
  fixed those before testing, but then still send out the old patch." ]

Signed-off-by: Chris Wright <chrisw@sous-sol.org>
Cc: Gerd Hoffmann <kraxel@suse.de>
Acked-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
include/asm-i386/rwlock.h

index 3ac1ba98b1bca861e9c64d10eb6fd21b84f225a5..87c069ccba084dcd76045858ef50ca0878cc416c 100644 (file)
 #define RW_LOCK_BIAS_STR       "0x01000000"
 
 #define __build_read_lock_ptr(rw, helper)   \
-       asm volatile(LOCK_PREFIX " subl $1,(%0)\n\t" \
+       asm volatile(LOCK_PREFIX " subl $1,(%0)\n\t" \
                        "jns 1f\n" \
                        "call " helper "\n\t" \
                        "1:\n" \
-                       :"a" (rw) : "memory")
+                       ::"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper)   \
-       asm volatile(LOCK_PREFIX " subl $1,%0\n\t" \
+       asm volatile(LOCK_PREFIX " subl $1,%0\n\t" \
                        "jns 1f\n" \
                        "pushl %%eax\n\t" \
                        "leal %0,%%eax\n\t" \
                        "call " helper "\n\t" \
                        "popl %%eax\n\t" \
-                       "1:\n" \
-                       "+m" (*(volatile int *)rw) : : "memory")
+                       "1:\n" \
+                       :"+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper)  do { \
                                                if (__builtin_constant_p(rw)) \
                                        } while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-       asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+       asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
                        "jz 1f\n" \
                        "call " helper "\n\t" \
-                       "1:\n", \
-                       "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
-                       :"a" (rw) : "memory")
+                       "1:\n" \
+                       ::"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-       asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+       asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
                        "jz 1f\n" \
                        "pushl %%eax\n\t" \
                        "leal %0,%%eax\n\t" \
                        "call " helper "\n\t" \
                        "popl %%eax\n\t" \
-                       "1:\n", \
-                       "subl $" RW_LOCK_BIAS_STR ",%0\n\t", \
-                       "+m" (*(volatile int *)rw) : : "memory")
+                       "1:\n" \
+                       :"+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_write_lock(rw, helper) do { \
                                                if (__builtin_constant_p(rw)) \