sparc64: Use indirect calls in hamming weight stubs
authorDavid S. Miller <davem@davemloft.net>
Thu, 22 Jun 2017 14:56:48 +0000 (10:56 -0400)
committerMasahiro Yamada <yamada.masahiro@socionext.com>
Thu, 29 Jun 2017 23:59:55 +0000 (08:59 +0900)
Otherwise, depending upon link order, the branch relocation
limits could be exceeded.

Signed-off-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
arch/sparc/lib/hweight.S

index f9985f129fb68e2c599b944a3be5c0a4ebddb8b3..d21cf20e5c1eb234fa083d883ded321c4297d6ef 100644 (file)
@@ -4,9 +4,9 @@
        .text
        .align  32
 ENTRY(__arch_hweight8)
-       ba,pt   %xcc, __sw_hweight8
+       sethi   %hi(__sw_hweight8), %g1
+       jmpl    %g1 + %lo(__sw_hweight8), %g0
         nop
-       nop
 ENDPROC(__arch_hweight8)
 EXPORT_SYMBOL(__arch_hweight8)
        .section        .popc_3insn_patch, "ax"
@@ -17,9 +17,9 @@ EXPORT_SYMBOL(__arch_hweight8)
        .previous
 
 ENTRY(__arch_hweight16)
-       ba,pt   %xcc, __sw_hweight16
+       sethi   %hi(__sw_hweight16), %g1
+       jmpl    %g1 + %lo(__sw_hweight16), %g0
         nop
-       nop
 ENDPROC(__arch_hweight16)
 EXPORT_SYMBOL(__arch_hweight16)
        .section        .popc_3insn_patch, "ax"
@@ -30,9 +30,9 @@ EXPORT_SYMBOL(__arch_hweight16)
        .previous
 
 ENTRY(__arch_hweight32)
-       ba,pt   %xcc, __sw_hweight32
+       sethi   %hi(__sw_hweight32), %g1
+       jmpl    %g1 + %lo(__sw_hweight32), %g0
         nop
-       nop
 ENDPROC(__arch_hweight32)
 EXPORT_SYMBOL(__arch_hweight32)
        .section        .popc_3insn_patch, "ax"
@@ -43,9 +43,9 @@ EXPORT_SYMBOL(__arch_hweight32)
        .previous
 
 ENTRY(__arch_hweight64)
-       ba,pt   %xcc, __sw_hweight64
+       sethi   %hi(__sw_hweight64), %g1
+       jmpl    %g1 + %lo(__sw_hweight64), %g0
         nop
-       nop
 ENDPROC(__arch_hweight64)
 EXPORT_SYMBOL(__arch_hweight64)
        .section        .popc_3insn_patch, "ax"