Commit e96ebd58 authored by John David Anglin, committed by Helge Deller
Browse files

parisc: Implement __smp_store_release and __smp_load_acquire barriers



This patch implements the __smp_store_release and __smp_load_acquire barriers
using ordered stores and loads.  This avoids the sync instruction present in
the generic implementation.

Cc: <stable@vger.kernel.org> # 4.14+
Signed-off-by: Dave Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>
parent 5b24993c
Loading
Loading
Loading
Loading
+61 −0
Original line number Diff line number Diff line
@@ -26,6 +26,67 @@
/* SMP read/write barriers fall back to the full mb() barrier. */
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

/*
 * Release store: all accesses before the store must be performed first.
 * Implemented with PA 2.0 "ordered" stores — the ,o completer shares its
 * encoding with ,ma at a zero displacement — which avoids the sync
 * instruction the generic __smp_store_release() would emit.
 *
 * The union forces an exact-width reinterpretation of v without a strict
 * aliasing violation; the "memory" clobber keeps the compiler from
 * reordering other accesses around the asm.
 *
 * 8-byte values are only stored on CONFIG_64BIT kernels;
 * compiletime_assert_atomic_type() rejects them on 32-bit builds, so the
 * silent break in that case is unreachable.
 */
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)

/*
 * Acquire load: accesses after the load must not be performed before it.
 * Per the commit description this uses an ordered load (,ma completer
 * with a zero displacement) instead of the sync instruction used by the
 * generic implementation.
 *
 * The result is written through the union so the value is returned with
 * the caller's exact type; the "memory" clobber prevents the compiler
 * from hoisting later accesses above the asm.
 *
 * NOTE(review): on !CONFIG_64BIT, the 8-byte case leaves __u
 * uninitialized, but compiletime_assert_atomic_type(*p) rejects 8-byte
 * types on 32-bit builds, so that path cannot be reached.
 */
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */