ARM: 7670/1: fix the memset fix
author    Nicolas Pitre <nicolas.pitre@linaro.org>
          Tue, 12 Mar 2013 12:00:42 +0000 (13:00 +0100)
committer Russell King <rmk+kernel@arm.linux.org.uk>
          Tue, 12 Mar 2013 12:18:47 +0000 (12:18 +0000)
Commit 455bd4c430b0 ("ARM: 7668/1: fix memset-related crashes caused by
recent GCC (4.7.2) optimizations") attempted to fix a compliance issue
with the memset return value: the C standard requires memset() to return
its first argument, and recent GCC versions generate code that relies on
this.  However, that patch broke memset itself for misaligned pointers.
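
For context, here is a minimal C illustration of the return-value
contract (a hand-written sketch, not code from either patch): the C
standard lets the compiler address the buffer through the pointer that
memset() returns, so an implementation that comes back with that
register clobbered crashes its callers.

  #include <stdio.h>
  #include <string.h>

  struct foo { int a; int b; };

  /* memset() is required to return its first argument, so the compiler
   * may legitimately reuse the returned pointer instead of keeping its
   * own copy of 'f'.  If the assembly memset clobbers that register,
   * the store below goes through a bogus pointer. */
  static void init_foo(struct foo *f)
  {
          struct foo *p = memset(f, 0, sizeof(*f));  /* p must equal f */
          p->a = 1;
  }

  int main(void)
  {
          struct foo f;
          init_foo(&f);
          printf("%d %d\n", f.a, f.b);
          return 0;
  }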

This fixes the above by making the misaligned fixup code branch back
over the entry code, so that the original pointer is not reloaded into
ip and the alignment adjustment already applied there is not lost.
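
The shape of the fixed routine, rendered as a rough C sketch for
illustration only (the function name memset_sketch is made up here;
the authoritative code is the ARM assembly in the diff below): the
destination pointer is kept once for the return value, the misaligned
head is filled byte by byte, and control then joins the aligned
word-filling path without ever re-reading the original pointer.

  #include <stddef.h>
  #include <stdint.h>

  void *memset_sketch(void *s, int c, size_t n)
  {
          unsigned char *ip = s;        /* working pointer, like ip */
          unsigned char b = (unsigned char)c;

          /* Misaligned fixup (the "6:" block): only taken when at
           * least a whole word is left; shorter runs fall through to
           * the byte tail below. */
          if (((uintptr_t)ip & 3) && n >= 4) {
                  while ((uintptr_t)ip & 3) {
                          *ip++ = b;
                          n--;
                  }
                  /* join the aligned path without re-reading the
                   * original pointer (what the final "b 1b" does) */
          }

          /* Aligned word loop (the "1:" path). */
          if (!((uintptr_t)ip & 3)) {
                  uint32_t pattern = b * 0x01010101u; /* byte x4 */
                  while (n >= 4) {
                          *(uint32_t *)ip = pattern;  /* type pun, illustrative */
                          ip += 4;
                          n -= 4;
                  }
          }

          /* Byte tail (the "5:" path). */
          while (n--)
                  *ip++ = b;

          return s;                     /* original pointer, like r0 */
  }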

Also, the fixup code is moved to the end of the function, because
keeping it before the entry point leaves the function entry alignment
wrong in the Thumb mode compilation.

While at it, the entry instructions are slightly reworked to help
dual-issue pipelines.

Signed-off-by: Nicolas Pitre <nico@linaro.org>
Tested-by: Alexander Holler <holler@ahsoftware.de>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
arch/arm/lib/memset.S

index d912e7397ecc94a190a132b649f667b94774c1b9..94b0650ea98fd42492c280d20c7610382a2f4a81 100644
 
        .text
        .align  5
-       .word   0
-
-1:     subs    r2, r2, #4              @ 1 do we have enough
-       blt     5f                      @ 1 bytes to align with?
-       cmp     r3, #2                  @ 1
-       strltb  r1, [ip], #1            @ 1
-       strleb  r1, [ip], #1            @ 1
-       strb    r1, [ip], #1            @ 1
-       add     r2, r2, r3              @ 1 (r2 = r2 - (4 - r3))
-/*
- * The pointer is now aligned and the length is adjusted.  Try doing the
- * memset again.
- */
 
 ENTRY(memset)
-/*
- * Preserve the contents of r0 for the return value.
- */
-       mov     ip, r0
-       ands    r3, ip, #3              @ 1 unaligned?
-       bne     1b                      @ 1
+       ands    r3, r0, #3              @ 1 unaligned?
+       mov     ip, r0                  @ preserve r0 as return value
+       bne     6f                      @ 1
 /*
  * we know that the pointer in ip is aligned to a word boundary.
  */
-       orr     r1, r1, r1, lsl #8
+1:     orr     r1, r1, r1, lsl #8
        orr     r1, r1, r1, lsl #16
        mov     r3, r1
        cmp     r2, #16
@@ -127,4 +111,13 @@ ENTRY(memset)
        tst     r2, #1
        strneb  r1, [ip], #1
        mov     pc, lr
+
+6:     subs    r2, r2, #4              @ 1 do we have enough
+       blt     5b                      @ 1 bytes to align with?
+       cmp     r3, #2                  @ 1
+       strltb  r1, [ip], #1            @ 1
+       strleb  r1, [ip], #1            @ 1
+       strb    r1, [ip], #1            @ 1
+       add     r2, r2, r3              @ 1 (r2 = r2 - (4 - r3))
+       b       1b
 ENDPROC(memset)