Move the asm-generic/system.h xchg() implementation to asm-generic/cmpxchg.h
author     David Howells <dhowells@redhat.com>
           Wed, 28 Mar 2012 17:30:03 +0000 (18:30 +0100)
committer  David Howells <dhowells@redhat.com>
           Wed, 28 Mar 2012 17:30:03 +0000 (18:30 +0100)
Move the asm-generic/system.h xchg() implementation to asm-generic/cmpxchg.h
to simplify disintegration of asm/system.h.

Signed-off-by: David Howells <dhowells@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
include/asm-generic/cmpxchg.h
include/asm-generic/system.h
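
For reference, callers are unaffected by the move: xchg() and cmpxchg() keep the same
interface, with the generic UP fallback now living in asm-generic/cmpxchg.h rather than
asm-generic/system.h.  A minimal sketch, not part of the patch -- example_flag and
example_update() are made-up names, and it assumes a UP configuration whose asm/cmpxchg.h
ends up pulling in the generic header:

    #include <linux/types.h>
    #include <asm/cmpxchg.h>   /* assumed to include asm-generic/cmpxchg.h on this UP config */

    static unsigned long example_flag;

    static void example_update(void)
    {
            unsigned long old;

            /* Swap in 1; the generic fallback does this under local_irq_save(). */
            old = xchg(&example_flag, 1UL);

            /* Store 2 only if the word still holds what xchg() returned. */
            cmpxchg(&example_flag, old, 2UL);
    }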

diff --git a/include/asm-generic/cmpxchg.h b/include/asm-generic/cmpxchg.h
index e0449af91f607207738907419482c29838aed56c..8a361834dc25887e658107a3bf5dbf5f8142752d 100644
--- a/include/asm-generic/cmpxchg.h
+++ b/include/asm-generic/cmpxchg.h
@@ -1,16 +1,87 @@
+/*
+ * Generic UP xchg and cmpxchg using interrupt disablement.  Does not
+ * support SMP.
+ */
+
 #ifndef __ASM_GENERIC_CMPXCHG_H
 #define __ASM_GENERIC_CMPXCHG_H
 
-/*
- * Generic cmpxchg
- *
- * Uses the local cmpxchg. Does not support SMP.
- */
 #ifdef CONFIG_SMP
 #error "Cannot use generic cmpxchg on SMP"
 #endif
 
-#include <asm-generic/cmpxchg-local.h>
+#include <linux/irqflags.h>
+
+#ifndef xchg
+
+/*
+ * This function doesn't exist, so you'll get a linker error if
+ * something tries to do an invalidly-sized xchg().
+ */
+extern void __xchg_called_with_bad_pointer(void);
+
+static inline
+unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+{
+       unsigned long ret, flags;
+
+       switch (size) {
+       case 1:
+#ifdef __xchg_u8
+               return __xchg_u8(x, ptr);
+#else
+               local_irq_save(flags);
+               ret = *(volatile u8 *)ptr;
+               *(volatile u8 *)ptr = x;
+               local_irq_restore(flags);
+               return ret;
+#endif /* __xchg_u8 */
+
+       case 2:
+#ifdef __xchg_u16
+               return __xchg_u16(x, ptr);
+#else
+               local_irq_save(flags);
+               ret = *(volatile u16 *)ptr;
+               *(volatile u16 *)ptr = x;
+               local_irq_restore(flags);
+               return ret;
+#endif /* __xchg_u16 */
+
+       case 4:
+#ifdef __xchg_u32
+               return __xchg_u32(x, ptr);
+#else
+               local_irq_save(flags);
+               ret = *(volatile u32 *)ptr;
+               *(volatile u32 *)ptr = x;
+               local_irq_restore(flags);
+               return ret;
+#endif /* __xchg_u32 */
+
+#ifdef CONFIG_64BIT
+       case 8:
+#ifdef __xchg_u64
+               return __xchg_u64(x, ptr);
+#else
+               local_irq_save(flags);
+               ret = *(volatile u64 *)ptr;
+               *(volatile u64 *)ptr = x;
+               local_irq_restore(flags);
+               return ret;
+#endif /* __xchg_u64 */
+#endif /* CONFIG_64BIT */
+
+       default:
+               __xchg_called_with_bad_pointer();
+               return x;
+       }
+}
+
+#define xchg(ptr, x) \
+       ((__typeof__(*(ptr))) __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
+
+#endif /* xchg */
 
 /*
  * Atomic compare and exchange.
@@ -18,7 +89,9 @@
  * Do not define __HAVE_ARCH_CMPXCHG because we want to use it to check whether
  * a cmpxchg primitive faster than repeated local irq save/restore exists.
  */
+#include <asm-generic/cmpxchg-local.h>
+
 #define cmpxchg(ptr, o, n)     cmpxchg_local((ptr), (o), (n))
 #define cmpxchg64(ptr, o, n)   cmpxchg64_local((ptr), (o), (n))
 
-#endif
+#endif /* __ASM_GENERIC_CMPXCHG_H */
diff --git a/include/asm-generic/system.h b/include/asm-generic/system.h
index f98f693383c438ef6eddf1cd77797bf6e45cd762..54cd124ea9a4e7beab8f1533d43a363e7f21c3fb 100644
--- a/include/asm-generic/system.h
+++ b/include/asm-generic/system.h
@@ -17,7 +17,6 @@
 #ifndef __ASSEMBLY__
 
 #include <linux/types.h>
-#include <linux/irqflags.h>
 
 #include <asm/barrier.h>
 #include <asm/cmpxchg.h>
@@ -34,75 +33,6 @@ extern struct task_struct *__switch_to(struct task_struct *,
 
 #define arch_align_stack(x) (x)
 
-/*
- * we make sure local_irq_enable() doesn't cause priority inversion
- */
-
-/* This function doesn't exist, so you'll get a linker error
- *    if something tries to do an invalid xchg().  */
-extern void __xchg_called_with_bad_pointer(void);
-
-static inline
-unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
-{
-       unsigned long ret, flags;
-
-       switch (size) {
-       case 1:
-#ifdef __xchg_u8
-               return __xchg_u8(x, ptr);
-#else
-               local_irq_save(flags);
-               ret = *(volatile u8 *)ptr;
-               *(volatile u8 *)ptr = x;
-               local_irq_restore(flags);
-               return ret;
-#endif /* __xchg_u8 */
-
-       case 2:
-#ifdef __xchg_u16
-               return __xchg_u16(x, ptr);
-#else
-               local_irq_save(flags);
-               ret = *(volatile u16 *)ptr;
-               *(volatile u16 *)ptr = x;
-               local_irq_restore(flags);
-               return ret;
-#endif /* __xchg_u16 */
-
-       case 4:
-#ifdef __xchg_u32
-               return __xchg_u32(x, ptr);
-#else
-               local_irq_save(flags);
-               ret = *(volatile u32 *)ptr;
-               *(volatile u32 *)ptr = x;
-               local_irq_restore(flags);
-               return ret;
-#endif /* __xchg_u32 */
-
-#ifdef CONFIG_64BIT
-       case 8:
-#ifdef __xchg_u64
-               return __xchg_u64(x, ptr);
-#else
-               local_irq_save(flags);
-               ret = *(volatile u64 *)ptr;
-               *(volatile u64 *)ptr = x;
-               local_irq_restore(flags);
-               return ret;
-#endif /* __xchg_u64 */
-#endif /* CONFIG_64BIT */
-
-       default:
-               __xchg_called_with_bad_pointer();
-               return x;
-       }
-}
-
-#define xchg(ptr, x) \
-       ((__typeof__(*(ptr))) __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
-
 #endif /* !__ASSEMBLY__ */
 
 #endif /* __ASM_GENERIC_SYSTEM_H */
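
Note that the moved __xchg() keeps the per-size override hooks, so an architecture with a
native swap instruction can still define __xchg_u8/__xchg_u16/__xchg_u32/__xchg_u64 before
including the generic header and only fall back to the irq-disable path for the sizes it
does not cover.  A hedged sketch of such an override for a hypothetical UP port -- the
"swap32" instruction, the header guard, and the __arch_xchg_u32() name are all made up:

    /* arch/<port>/include/asm/cmpxchg.h -- illustrative only */
    #ifndef _ASM_PORT_CMPXCHG_H
    #define _ASM_PORT_CMPXCHG_H

    #include <linux/types.h>

    /*
     * Native 32-bit exchange; the generic __xchg() will use this for size 4
     * instead of the local_irq_save()/local_irq_restore() fallback.
     */
    static inline unsigned long __arch_xchg_u32(unsigned long x, volatile void *ptr)
    {
            /* Hypothetical instruction: swap register operand with memory operand. */
            asm volatile("swap32 %0, %1"
                         : "+r" (x), "+m" (*(volatile u32 *)ptr)
                         :
                         : "memory");
            return x;       /* now holds the old memory value */
    }
    #define __xchg_u32 __arch_xchg_u32

    #include <asm-generic/cmpxchg.h>

    #endif /* _ASM_PORT_CMPXCHG_H */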