[PATCH] atomic: add_unless cmpxchg optimise
author	Nick Piggin <npiggin@suse.de>
Thu, 23 Mar 2006 11:01:02 +0000 (03:01 -0800)
committer	Linus Torvalds <torvalds@g5.osdl.org>
Thu, 23 Mar 2006 15:38:17 +0000 (07:38 -0800)
Without branch hints, the gcc-4 compilers I have tested unroll the loop to
cover the very unlikely case of it repeating after a cmpxchg failure.

Improve this for architectures with a native cas/cmpxchg by adding explicit
branch hints.  ll/sc archs should try to implement this natively instead.

Signed-off-by: Nick Piggin <npiggin@suse.de>
Cc: Andi Kleen <ak@muc.de>
Cc: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Heiko Carstens <heiko.carstens@de.ibm.com>
Cc: "David S. Miller" <davem@davemloft.net>
Cc: Roman Zippel <zippel@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
include/asm-i386/atomic.h
include/asm-ia64/atomic.h
include/asm-m68k/atomic.h
include/asm-s390/atomic.h
include/asm-sparc64/atomic.h
include/asm-x86_64/atomic.h

include/asm-i386/atomic.h
index 78b0032d1f2907e5cfba92b5675b19affd4f4867..22d80ece95cb48de54ed1dea6e0c9475d10935ba 100644
@@ -225,8 +225,14 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 ({                                                             \
        int c, old;                                             \
        c = atomic_read(v);                                     \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
                c = old;                                        \
+       }                                                       \
        c != (u);                                               \
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
include/asm-ia64/atomic.h
index d3e0dfa99e1f95e224e0c175a169cf040c7a76fc..569ec7574baf24b01ce450c4f6a5cdc32d805a60 100644
@@ -95,8 +95,14 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
 ({                                                             \
        int c, old;                                             \
        c = atomic_read(v);                                     \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
                c = old;                                        \
+       }                                                       \
        c != (u);                                               \
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
include/asm-m68k/atomic.h
index 862e497c26453070674af81cd0786f5d1c5ad30d..732d696d31a6590aa3a00647be49ec41d9d4f4aa 100644
@@ -175,8 +175,14 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 ({                                                             \
        int c, old;                                             \
        c = atomic_read(v);                                     \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
                c = old;                                        \
+       }                                                       \
        c != (u);                                               \
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
include/asm-s390/atomic.h
index be6fefe223d614e6326c6f210a61a20caa51d0aa..de1d9926aa60386da7e464b59e45e4bb6784a1c1 100644
@@ -89,10 +89,15 @@ static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
 static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 {
        int c, old;
-
        c = atomic_read(v);
-       while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
+       for (;;) {
+               if (unlikely(c == u))
+                       break;
+               old = atomic_cmpxchg(v, c, c + a);
+               if (likely(old == c))
+                       break;
                c = old;
+       }
        return c != u;
 }
 
@@ -167,10 +172,15 @@ static __inline__ int atomic64_add_unless(atomic64_t *v,
                                          long long a, long long u)
 {
        long long c, old;
-
        c = atomic64_read(v);
-       while (c != u && (old = atomic64_cmpxchg(v, c, c + a)) != c)
+       for (;;) {
+               if (unlikely(c == u))
+                       break;
+               old = atomic64_cmpxchg(v, c, c + a);
+               if (likely(old == c))
+                       break;
                c = old;
+       }
        return c != u;
 }
 
include/asm-sparc64/atomic.h
index 25256bdc8aae3f9a64862f8b73cdf0dad0d7f840..468eb48d814246e0b793b900d459b0ad51422cd6 100644
@@ -78,9 +78,15 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 ({                                                             \
        int c, old;                                             \
        c = atomic_read(v);                                     \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
                c = old;                                        \
-       c != (u);                                               \
+       }                                                       \
+       likely(c != (u));                                       \
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
include/asm-x86_64/atomic.h
index 4b5cd553e772637d157b0af95edbd89d7e12eb1a..cecbf7baa6aa346297efa3bb14480bddd5e4236a 100644
@@ -405,8 +405,14 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 ({                                                             \
        int c, old;                                             \
        c = atomic_read(v);                                     \
-       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
                c = old;                                        \
+       }                                                       \
        c != (u);                                               \
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)