arm64: add missing data types in smp_load_acquire/smp_store_release
author	Andre Przywara <andre.przywara@arm.com>
	Mon, 20 Apr 2015 10:14:19 +0000 (11:14 +0100)
committer	Will Deacon <will.deacon@arm.com>
	Mon, 27 Apr 2015 10:39:04 +0000 (11:39 +0100)
Commit 8053871d0f7f ("smp: Fix smp_call_function_single_async()
locking") introduced a call to smp_load_acquire() with a u16 argument,
but so far that macro only handled the u32 and u64 types.
Fortunately this resulted in a compiler warning pointing at an
uninitialized use. Due to the structure of the implementation the
compiler misses the equivalent bug in smp_store_release(), though.
Add the u16 and u8 variants using ldarh/stlrh and ldarb/stlrb,
respectively. Together with the compiletime_assert_atomic_type() check
this should cover all cases now.
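
For illustration only (not part of the patch): a minimal sketch of the
kind of code that now compiles to ldarh/stlrh, assuming a 16-bit flags
word polled in the style of the csd locking introduced by 8053871d0f7f.
The struct and helper names below are hypothetical.

	#include <linux/types.h>
	#include <asm/barrier.h>
	#include <asm/processor.h>

	struct example_csd {
		u16 flags;		/* hypothetical 16-bit lock/flags word */
	};

	static void example_lock_wait(struct example_csd *csd)
	{
		/* smp_load_acquire() on a u16 now emits ldarh */
		while (smp_load_acquire(&csd->flags) & 1)
			cpu_relax();
	}

	static void example_unlock(struct example_csd *csd)
	{
		/* smp_store_release() on a u16 now emits stlrh */
		smp_store_release(&csd->flags, 0);
	}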

Acked-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Andre Przywara <andre.przywara@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
arch/arm64/include/asm/barrier.h

index a5abb0062d6e943d67ca4ff5479e887194482367..71f19c4dc0dee8c8a74bbdea252f73432e0b1aa5 100644 (file)
@@ -65,6 +65,14 @@ do {                                                                 \
 do {                                                                   \
        compiletime_assert_atomic_type(*p);                             \
        switch (sizeof(*p)) {                                           \
+       case 1:                                                         \
+               asm volatile ("stlrb %w1, %0"                           \
+                               : "=Q" (*p) : "r" (v) : "memory");      \
+               break;                                                  \
+       case 2:                                                         \
+               asm volatile ("stlrh %w1, %0"                           \
+                               : "=Q" (*p) : "r" (v) : "memory");      \
+               break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*p) : "r" (v) : "memory");      \
@@ -81,6 +89,14 @@ do {                                                                 \
        typeof(*p) ___p1;                                               \
        compiletime_assert_atomic_type(*p);                             \
        switch (sizeof(*p)) {                                           \
+       case 1:                                                         \
+               asm volatile ("ldarb %w0, %1"                           \
+                       : "=r" (___p1) : "Q" (*p) : "memory");          \
+               break;                                                  \
+       case 2:                                                         \
+               asm volatile ("ldarh %w0, %1"                           \
+                       : "=r" (___p1) : "Q" (*p) : "memory");          \
+               break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (___p1) : "Q" (*p) : "memory");          \