MIPS: Remove further use of .subsection
author		Ralf Baechle <ralf@linux-mips.org>
		Wed, 10 Apr 2013 22:16:53 +0000 (00:16 +0200)
committer	Ralf Baechle <ralf@linux-mips.org>
		Thu, 11 Apr 2013 13:39:52 +0000 (15:39 +0200)
7837314d141c661c70bc13c5050694413ecfe14a [MIPS: Get rid of branches to
.subsections] removed most uses of .subsection in inline assembler code.

It left the instances in spinlock.h alone because we knew their use was
in fairly small files where .subsection use was fine, but of course this
was a fragile assumption.  LTO breaks this assumption, resulting in build
errors due to exceeded branch range, so remove further instances of
.subsection.

The two functions that still use .macro don't currently cause issues;
however, this use is still fragile.
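
For reference, the fragile shape being removed looks like the sketch
below, condensed from the arch_read_lock hunk that follows (comments
added for illustration).  A MIPS conditional branch encodes a 16-bit
signed word offset, giving it a reach of roughly +/-128 KiB, and code
placed in .subsection 2 is emitted after all of the section's ordinary
code, so once LTO merges many objects into a single section the
out-of-line slow path can land beyond that reach:

		__asm__ __volatile__(
		"	.set	noreorder	# fast path, inline	\n"
		"1:	ll	%1, %2					\n"
		"	bltz	%1, 3f	# contended: go out of line	\n"
		"	 addu	%1, 1					\n"
		"2:	sc	%1, %0					\n"
		"	beqz	%1, 1b					\n"
		"	 nop						\n"
		"	.subsection 2	# slow path, placed at the	\n"
		"3:	ll	%1, %2	# end of the section; after	\n"
		"	bltz	%1, 3b	# LTO, the branches to 3f and	\n"
		"	 addu	%1, 1	# back to 2b may exceed the	\n"
		"	b	2b	# 16-bit branch offset range	\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");

The replacement keeps only the short, straight-line ll/sc sequence in
the asm and retries from C via do { ... } while (unlikely(!tmp)), so no
branch ever needs to reach outside those few instructions.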

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
arch/mips/include/asm/spinlock.h

diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 5130c88d64204b398be70c968823d408bb27f93c..0b1dbd22e6f87812e6908210c10d21659468ac13 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -242,25 +242,16 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
                : "m" (rw->lock)
                : "memory");
        } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # arch_read_lock        \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bltz    %1, 3f                                  \n"
-               "        addu   %1, 1                                   \n"
-               "2:     sc      %1, %0                                  \n"
-               "       beqz    %1, 1b                                  \n"
-               "        nop                                            \n"
-               "       .subsection 2                                   \n"
-               "3:     ll      %1, %2                                  \n"
-               "       bltz    %1, 3b                                  \n"
-               "        addu   %1, 1                                   \n"
-               "       b       2b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "1:     ll      %1, %2  # arch_read_lock        \n"
+                       "       bltz    %1, 1b                          \n"
+                       "        addu   %1, 1                           \n"
+                       "2:     sc      %1, %0                          \n"
+                       : "=m" (rw->lock), "=&r" (tmp)
+                       : "m" (rw->lock)
+                       : "memory");
+               } while (unlikely(!tmp));
        }
 
        smp_llsc_mb();
@@ -285,21 +276,15 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
                : "m" (rw->lock)
                : "memory");
        } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # arch_read_unlock      \n"
-               "1:     ll      %1, %2                                  \n"
-               "       sub     %1, 1                                   \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 2f                                  \n"
-               "        nop                                            \n"
-               "       .subsection 2                                   \n"
-               "2:     b       1b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "1:     ll      %1, %2  # arch_read_unlock      \n"
+                       "       sub     %1, 1                           \n"
+                       "       sc      %1, %0                          \n"
+                       : "=m" (rw->lock), "=&r" (tmp)
+                       : "m" (rw->lock)
+                       : "memory");
+               } while (unlikely(!tmp));
        }
 }
 
@@ -321,25 +306,16 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
                : "m" (rw->lock)
                : "memory");
        } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # arch_write_lock       \n"
-               "1:     ll      %1, %2                                  \n"
-               "       bnez    %1, 3f                                  \n"
-               "        lui    %1, 0x8000                              \n"
-               "2:     sc      %1, %0                                  \n"
-               "       beqz    %1, 3f                                  \n"
-               "        nop                                            \n"
-               "       .subsection 2                                   \n"
-               "3:     ll      %1, %2                                  \n"
-               "       bnez    %1, 3b                                  \n"
-               "        lui    %1, 0x8000                              \n"
-               "       b       2b                                      \n"
-               "        nop                                            \n"
-               "       .previous                                       \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "1:     ll      %1, %2  # arch_write_lock       \n"
+                       "       bnez    %1, 1b                          \n"
+                       "        lui    %1, 0x8000                      \n"
+                       "2:     sc      %1, %0                          \n"
+                       : "=m" (rw->lock), "=&r" (tmp)
+                       : "m" (rw->lock)
+                       : "memory");
+               } while (unlikely(!tmp));
        }
 
        smp_llsc_mb();
@@ -424,25 +400,21 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
                : "m" (rw->lock)
                : "memory");
        } else {
-               __asm__ __volatile__(
-               "       .set    noreorder       # arch_write_trylock    \n"
-               "       li      %2, 0                                   \n"
-               "1:     ll      %1, %3                                  \n"
-               "       bnez    %1, 2f                                  \n"
-               "       lui     %1, 0x8000                              \n"
-               "       sc      %1, %0                                  \n"
-               "       beqz    %1, 3f                                  \n"
-               "        li     %2, 1                                   \n"
-               "2:                                                     \n"
-               __WEAK_LLSC_MB
-               "       .subsection 2                                   \n"
-               "3:     b       1b                                      \n"
-               "        li     %2, 0                                   \n"
-               "       .previous                                       \n"
-               "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
-               : "memory");
+               do {
+                       __asm__ __volatile__(
+                       "       ll      %1, %3  # arch_write_trylock    \n"
+                       "       li      %2, 0                           \n"
+                       "       bnez    %1, 2f                          \n"
+                       "       lui     %1, 0x8000                      \n"
+                       "       sc      %1, %0                          \n"
+                       "       li      %2, 1                           \n"
+                       "2:                                             \n"
+                       : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+                       : "m" (rw->lock)
+                       : "memory");
+               } while (unlikely(!tmp));
+
+               smp_llsc_mb();
        }
 
        return ret;
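
For readers less used to ll/sc, the new arch_read_lock loop is
semantically close to the portable sketch below, written against the
GCC __atomic builtins.  This is an illustration only: demo_rwlock_t and
demo_read_lock are hypothetical names, not kernel API, and the acquire
ordering on success stands in for the smp_llsc_mb() that the kernel
code issues after the loop.

	#include <stdbool.h>

	/* Hypothetical stand-in for arch_rwlock_t: a signed 32-bit counter,
	 * negative while a writer holds the lock, incremented per reader. */
	typedef struct { int lock; } demo_rwlock_t;

	static inline void demo_read_lock(demo_rwlock_t *rw)
	{
		int old = __atomic_load_n(&rw->lock, __ATOMIC_RELAXED);

		for (;;) {
			/* Writer active (sign bit set): spin, like "bltz %1, 1b". */
			while (old < 0)
				old = __atomic_load_n(&rw->lock, __ATOMIC_RELAXED);

			/* The ll/sc pair acts as a compare-and-swap: it fails if
			 * the lock word changed between the load and the store,
			 * which is the "while (unlikely(!tmp))" retry case above.
			 * On failure, old is refreshed with the current value. */
			if (__atomic_compare_exchange_n(&rw->lock, &old, old + 1,
							false, __ATOMIC_ACQUIRE,
							__ATOMIC_RELAXED))
				break;
		}
	}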