x86: fix asm constraints in spinlock_32/64.h
Author:    Thomas Gleixner <tglx@linutronix.de>
           Wed, 30 Jan 2008 12:30:34 +0000 (13:30 +0100)
Committer: Ingo Molnar <mingo@elte.hu>
           Wed, 30 Jan 2008 12:30:34 +0000 (13:30 +0100)
Use the correct constraints for the spinlock assembler functions.

Read-modify-write functions (lock acquisition, counter increment/decrement)
need "+m" instead of "=m", since they both read and update the lock word.
Conversely, a plain store such as the unlock path only writes the lock word,
so it uses "=m" rather than "+m".

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
include/asm-x86/spinlock_32.h
include/asm-x86/spinlock_64.h

index c42c3f12d7ce9b0b7b184a694b088caded8735cf..fca124a1103d930c6ae27234ea9c34a50fc42e04 100644 (file)
@@ -99,7 +99,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-       asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory");
+       asm volatile("movb $1,%0" : "=m" (lock->slock) :: "memory");
 }
 
 #else
index 3b5adf92ad089529f068bd274999b28942c67b16..e81f6c18d87775dae9b850e46a477b442f6d35b1 100644 (file)
@@ -34,7 +34,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
                "jle 3b\n\t"
                "jmp 1b\n"
                "2:\t"
-               : "=m" (lock->slock) : : "memory");
+               : "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -80,7 +80,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
        asm volatile(
                "xchgl %0,%1"
-               :"=q" (oldval), "=m" (lock->slock)
+               :"=q" (oldval), "+m" (lock->slock)
                :"0" (0) : "memory");
 
        return oldval > 0;
@@ -162,13 +162,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-       asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
+       asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-       asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ",%0"
-                               : "=m" (rw->lock) : : "memory");
+       asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
+                               : "+m" (rw->lock) : : "memory");
 }
 
 #define _raw_spin_relax(lock)  cpu_relax()