#define wmb() fast_wmb()
#define rmb() fast_rmb()
-#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
+#if defined(CONFIG_WEAK_ORDERING)
# ifdef CONFIG_CPU_CAVIUM_OCTEON
-# define smp_mb() __sync()
-# define smp_rmb() barrier()
-# define smp_wmb() __syncw()
+# define __smp_mb() __sync()
+# define __smp_rmb() barrier()
+# define __smp_wmb() __syncw()
# else
-# define smp_mb() __asm__ __volatile__("sync" : : :"memory")
-# define smp_rmb() __asm__ __volatile__("sync" : : :"memory")
-# define smp_wmb() __asm__ __volatile__("sync" : : :"memory")
+# define __smp_mb() __asm__ __volatile__("sync" : : :"memory")
+# define __smp_rmb() __asm__ __volatile__("sync" : : :"memory")
+# define __smp_wmb() __asm__ __volatile__("sync" : : :"memory")
# endif
#else
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
+#define __smp_mb() barrier()
+#define __smp_rmb() barrier()
+#define __smp_wmb() barrier()
#endif
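/*
 * Rough sketch (not part of this patch) of how <asm-generic/barrier.h>,
 * included at the bottom of this file, consumes the __smp_* primitives
 * above: on CONFIG_SMP builds it maps smp_mb() and friends onto
 * __smp_mb() etc., and on !SMP builds it degrades them to plain compiler
 * barriers, which is why the "&& defined(CONFIG_SMP)" test can be
 * dropped from the #if above.
 *
 *	#ifdef CONFIG_SMP
 *	# ifndef smp_mb
 *	#  define smp_mb()	__smp_mb()
 *	# endif
 *	#else
 *	# ifndef smp_mb
 *	#  define smp_mb()	barrier()
 *	# endif
 *	#endif
 */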
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB "	sync	\n"
#else
#define __WEAK_LLSC_MB "		\n"
#endif
#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
#ifdef CONFIG_CPU_CAVIUM_OCTEON
#define smp_mb__before_llsc() smp_wmb()
+#define __smp_mb__before_llsc() __smp_wmb()
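/*
 * Octeon reorders only writes (note that __smp_rmb() is a plain compiler
 * barrier above), so a syncw ahead of an ll/sc sequence suffices here;
 * the generic case below has to fall back to smp_llsc_mb(), a full "sync".
 */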
/* Cause previous writes to become visible on all CPUs as soon as possible */
#define nudge_writes() __asm__ __volatile__(".set push\n\t" \
				".set arch=octeon\n\t" \
				"syncw\n\t" \
				".set pop" : : : "memory")
#else
#define smp_mb__before_llsc() smp_llsc_mb()
+#define __smp_mb__before_llsc() smp_llsc_mb()
#define nudge_writes() mb()
#endif
-#define smp_mb__before_atomic() smp_mb__before_llsc()
-#define smp_mb__after_atomic() smp_llsc_mb()
+#define __smp_mb__before_atomic() __smp_mb__before_llsc()
+#define __smp_mb__after_atomic() smp_llsc_mb()
#include <asm-generic/barrier.h>
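/*
 * Hypothetical usage sketch, not part of barrier.h: a classic
 * producer/consumer pairing built on the smp_wmb()/smp_rmb() that the
 * generic header derives from the __smp_* primitives above. The names
 * data, flag, producer() and consumer() are illustrative only; assumes
 * <linux/compiler.h> for READ_ONCE()/WRITE_ONCE().
 */
static int data;
static int flag;

static void producer(int v)
{
	WRITE_ONCE(data, v);
	smp_wmb();	/* order the data store before the flag store */
	WRITE_ONCE(flag, 1);
}

static int consumer(void)
{
	if (!READ_ONCE(flag))
		return -1;
	smp_rmb();	/* pairs with the producer's smp_wmb() */
	return READ_ONCE(data);
}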