From: Paul Burton <paul.burton@mips.com>
[ Upstream commit 3c1d3f0979721a39dd2980c97466127ce65aa130 ]
Generate the sync instructions required to work around the Loongson3 LL/SC errata within the inline asm blocks themselves. This feels a little safer than doing it from C, where strictly speaking the compiler would be well within its rights to insert a memory access between the separate asm statements we previously had, one containing the sync and the other the ll.
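[ Illustrative sketch, not part of the patch: the snippet below uses
  made-up helper names and a simple add operation rather than the real
  futex/barrier code, and only shows the difference between emitting the
  sync from a separate asm statement and emitting it inside the same asm
  block as the ll/sc loop. ]

/* Old shape: barrier and LL/SC loop in two separate asm statements. */
static inline void llsc_add_old(int *p, int v)
{
	int tmp;

	__asm__ __volatile__("sync" : : : "memory");	/* loongson_llsc_mb() style */
	/* the compiler may legally schedule a load or store here */
	__asm__ __volatile__(
	"1:	ll	%0, %1		\n"
	"	addu	%0, %2		\n"
	"	sc	%0, %1		\n"
	"	beqz	%0, 1b		\n"
	: "=&r" (tmp), "+m" (*p)
	: "r" (v)
	: "memory");
}

/* New shape: sync emitted inside the same asm block as the ll/sc loop,
 * so nothing can be placed between it and the ll. */
static inline void llsc_add_new(int *p, int v)
{
	int tmp;

	__asm__ __volatile__(
	"	sync			\n"
	"1:	ll	%0, %1		\n"
	"	addu	%0, %2		\n"
	"	sc	%0, %1		\n"
	"	beqz	%0, 1b		\n"
	: "=&r" (tmp), "+m" (*p)
	: "r" (v)
	: "memory");
}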
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Sasha Levin <sashal@kernel.org>
---
 arch/mips/include/asm/barrier.h | 13 +++++++------
 arch/mips/include/asm/futex.h   | 15 +++++++--------
 2 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 9228f73862205..fb842965d541d 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -218,13 +218,14 @@
  * ordering will be done by smp_llsc_mb() and friends.
  */
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
-#define __WEAK_LLSC_MB		"	sync	\n"
-#define smp_llsc_mb()		__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
-#define __LLSC_CLOBBER
+# define __WEAK_LLSC_MB		sync
+# define smp_llsc_mb() \
+	__asm__ __volatile__(__stringify(__WEAK_LLSC_MB) : : :"memory")
+# define __LLSC_CLOBBER
 #else
-#define __WEAK_LLSC_MB		"		\n"
-#define smp_llsc_mb()		do { } while (0)
-#define __LLSC_CLOBBER		"memory"
+# define __WEAK_LLSC_MB
+# define smp_llsc_mb()		do { } while (0)
+# define __LLSC_CLOBBER		"memory"
 #endif
 
 #ifdef CONFIG_CPU_CAVIUM_OCTEON
diff --git a/arch/mips/include/asm/futex.h b/arch/mips/include/asm/futex.h
index b83b0397462d9..54cf205309316 100644
--- a/arch/mips/include/asm/futex.h
+++ b/arch/mips/include/asm/futex.h
@@ -16,6 +16,7 @@
 #include <asm/barrier.h>
 #include <asm/compiler.h>
 #include <asm/errno.h>
+#include <asm/sync.h>
 #include <asm/war.h>
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
@@ -32,7 +33,7 @@
 		"	.set	arch=r4000			\n"	\
 		"2:	sc	$1, %2				\n"	\
 		"	beqzl	$1, 1b				\n"	\
-		__WEAK_LLSC_MB						\
+		__stringify(__WEAK_LLSC_MB)				\
 		"3:						\n"	\
 		"	.insn					\n"	\
 		"	.set	pop				\n"	\
@@ -50,19 +51,19 @@
 		"i" (-EFAULT)						\
 		: "memory");						\
 	} else if (cpu_has_llsc) {					\
-		loongson_llsc_mb();					\
 		__asm__ __volatile__(					\
 		"	.set	push				\n"	\
 		"	.set	noat				\n"	\
 		"	.set	push				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
+		"	" __SYNC(full, loongson3_war) "		\n"	\
 		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
 		"	.set	pop				\n"	\
 		"	" insn	"				\n"	\
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
 		"2:	"user_sc("$1", "%2")"			\n"	\
 		"	beqz	$1, 1b				\n"	\
-		__WEAK_LLSC_MB						\
+		__stringify(__WEAK_LLSC_MB)				\
 		"3:						\n"	\
 		"	.insn					\n"	\
 		"	.set	pop				\n"	\
@@ -147,7 +148,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"	.set	arch=r4000			\n"
 		"2:	sc	$1, %2				\n"
 		"	beqzl	$1, 1b				\n"
-		__WEAK_LLSC_MB
+		__stringify(__WEAK_LLSC_MB)
 		"3:						\n"
 		"	.insn					\n"
 		"	.set	pop				\n"
@@ -164,13 +165,13 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"i" (-EFAULT)
 		: "memory");
 	} else if (cpu_has_llsc) {
-		loongson_llsc_mb();
 		__asm__ __volatile__(
 		"# futex_atomic_cmpxchg_inatomic		\n"
 		"	.set	push				\n"
 		"	.set	noat				\n"
 		"	.set	push				\n"
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
+		"	" __SYNC(full, loongson3_war) "		\n"
 		"1:	"user_ll("%1", "%3")"			\n"
 		"	bne	%1, %z4, 3f			\n"
 		"	.set	pop				\n"
@@ -178,8 +179,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
 		"2:	"user_sc("$1", "%2")"			\n"
 		"	beqz	$1, 1b				\n"
-		__WEAK_LLSC_MB
-		"3:						\n"
+		"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
 		"	.insn					\n"
 		"	.set	pop				\n"
 		"	.section .fixup,\"ax\"			\n"
@@ -194,7 +194,6 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
 		  "i" (-EFAULT)
 		: "memory");
-		loongson_llsc_mb();
 	} else
 		return -ENOSYS;
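[ Also not part of the patch, just context on the barrier.h change above:
  __WEAK_LLSC_MB becomes a bare token (sync, or nothing) and is turned
  back into a string at each use site with __stringify(), whose standard
  two-level definition lives in include/linux/stringify.h. A minimal
  userspace demonstration, assuming only that definition: ]

#include <stdio.h>

#define __stringify_1(x...)	#x
#define __stringify(x...)	__stringify_1(x)

/* mimic the CONFIG_WEAK_REORDERING_BEYOND_LLSC=y case from barrier.h */
#define __WEAK_LLSC_MB	sync

int main(void)
{
	/* prints: asm text: "sync"; with an empty __WEAK_LLSC_MB it prints "" */
	printf("asm text: \"%s\"\n", __stringify(__WEAK_LLSC_MB));
	return 0;
}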