sh: Provide movli.l/movco.l-based bitops.
author    Paul Mundt <lethal@linux-sh.org>
          Thu, 7 Aug 2008 08:36:12 +0000 (17:36 +0900)
committer Paul Mundt <lethal@linux-sh.org>
          Mon, 8 Sep 2008 01:35:02 +0000 (10:35 +0900)
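
Add LL/SC-based implementations of set_bit(), clear_bit(), change_bit()
and their test_and_*() counterparts on top of the SH-4A movli.l
(load-linked) and movco.l (store-conditional) pair: movli.l loads the
word and sets the LDST link bit, movco.l performs the store only while
LDST is still set and indicates success in T, so a failed store is
simply retried. SH-4A kernels that do not use gUSA (CONFIG_GUSA_RB)
now pick these up in place of the IRQ-disabling fallback in
asm/bitops-irq.h.

Callers keep using the generic bitops interface. An illustrative
sketch (the "pending" bitmap and example() are placeholders;
DECLARE_BITMAP is the usual kernel helper):

	#include <linux/types.h>	/* DECLARE_BITMAP() */

	static DECLARE_BITMAP(pending, 32);

	static void example(int nr)
	{
		set_bit(nr, pending);	/* atomic RMW, retried on contention */
		if (test_and_clear_bit(nr, pending))
			;		/* the bit was previously set */
	}
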
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
arch/sh/include/asm/bitops-llsc.h [new file with mode: 0644]
arch/sh/include/asm/bitops.h

diff --git a/arch/sh/include/asm/bitops-llsc.h b/arch/sh/include/asm/bitops-llsc.h
new file mode 100644
index 0000000..43b8e1a
--- /dev/null
+++ b/arch/sh/include/asm/bitops-llsc.h
@@ -0,0 +1,159 @@
+#ifndef __ASM_SH_BITOPS_LLSC_H
+#define __ASM_SH_BITOPS_LLSC_H
+
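+/*
+ * LL/SC retry loop: movli.l loads the word and sets the LDST link
+ * flag, movco.l stores only while the link is still held and sets T
+ * on success, and "bf 1b" loops until the store goes through.
+ */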
+static inline void set_bit(int nr, volatile void * addr)
+{
+       int     mask;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! set_bit               \n\t"
+               "or             %3, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               : "=&z" (tmp), "=r" (a)
+               : "1" (a), "r" (mask)
+               : "t", "memory"
+       );
+}
+
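+/* Same loop as set_bit, but ANDing with the inverted mask clears the bit. */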
+static inline void clear_bit(int nr, volatile void * addr)
+{
+       int     mask;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! clear_bit             \n\t"
+               "and            %3, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               : "=&z" (tmp), "=r" (a)
+               : "1" (a), "r" (~mask)
+               : "t", "memory"
+       );
+}
+
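+/* Same loop as set_bit, but XOR toggles the bit. */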
+static inline void change_bit(int nr, volatile void * addr)
+{
+       int     mask;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! change_bit            \n\t"
+               "xor            %3, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               : "=&z" (tmp), "=r" (a)
+               : "1" (a), "r" (mask)
+               : "t", "memory"
+       );
+}
+
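+/*
+ * The test_and_* variants copy the loaded word into %2 before
+ * modifying it, then isolate the bit of interest after the store
+ * succeeds, so the bit's old value is what gets returned.
+ */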
+static inline int test_and_set_bit(int nr, volatile void * addr)
+{
+       int     mask, retval;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! test_and_set_bit      \n\t"
+               "mov            %0, %2                          \n\t"
+               "or             %4, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               "and            %4, %2                          \n\t"
+               : "=&z" (tmp), "=r" (a), "=&r" (retval)
+               : "1" (a), "r" (mask)
+               : "t", "memory"
+       );
+
+       return retval != 0;
+}
+
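+/* %4 extracts the old bit from the copy, %5 is the inverted mask that
+ * clears it; synco then acts as a barrier for the completed update. */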
+static inline int test_and_clear_bit(int nr, volatile void * addr)
+{
+       int     mask, retval;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! test_and_clear_bit    \n\t"
+               "mov            %0, %2                          \n\t"
+               "and            %5, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               "and            %4, %2                          \n\t"
+               "synco                                          \n\t"
+               : "=&z" (tmp), "=r" (a), "=&r" (retval)
+               : "1" (a), "r" (mask), "r" (~mask)
+               : "t", "memory"
+       );
+
+       return retval != 0;
+}
+
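+/* As test_and_clear_bit, but XOR toggles the bit. */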
+static inline int test_and_change_bit(int nr, volatile void * addr)
+{
+       int     mask, retval;
+       volatile unsigned int *a = addr;
+       unsigned long tmp;
+
+       a += nr >> 5;
+       mask = 1 << (nr & 0x1f);
+
+       __asm__ __volatile__ (
+               "1:                                             \n\t"
+               "movli.l        @%1, %0 ! test_and_change_bit   \n\t"
+               "mov            %0, %2                          \n\t"
+               "xor            %4, %0                          \n\t"
+               "movco.l        %0, @%1                         \n\t"
+               "bf             1b                              \n\t"
+               "and            %4, %2                          \n\t"
+               "synco                                          \n\t"
+               : "=&z" (tmp), "=r" (a), "=&r" (retval)
+               : "1" (a), "r" (mask)
+               : "t", "memory"
+       );
+
+       return retval != 0;
+}
+
+#endif /* __ASM_SH_BITOPS_LLSC_H */
diff --git a/arch/sh/include/asm/bitops.h b/arch/sh/include/asm/bitops.h
index d7d382f63ee53a5c6a59a7033169ba6bc366cb8f..367930d8e5aeff378a49db60bf367af0b89bf235 100644
--- a/arch/sh/include/asm/bitops.h
+++ b/arch/sh/include/asm/bitops.h
@@ -13,6 +13,8 @@
 
 #ifdef CONFIG_GUSA_RB
 #include <asm/bitops-grb.h>
+#elif defined(CONFIG_CPU_SH4A)
+#include <asm/bitops-llsc.h>
 #else
 #include <asm/bitops-irq.h>
 #endif