Commit 131e135f authored by Mark Rutland, committed by Will Deacon

arm64: instrument smp_{load_acquire,store_release}



Our __smp_store_release() and __smp_load_acquire() macros use inline
assembly, which is opaque to kasan. This means that kasan can't catch
erroneous use of these.

This patch adds kasan instrumentation to both.
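
As a sketch (not part of the commit message): <linux/kasan-checks.h> provides
kasan_check_read() and kasan_check_write(), which take a pointer and an access
size and let KASAN validate an access the compiler cannot see. The helper below
is hypothetical, invented only to illustrate the same pattern the patch applies
to the barrier macros:

#include <linux/kasan-checks.h>
#include <linux/types.h>

/*
 * Hypothetical accessor, for illustration only: the load happens in
 * inline assembly, so it is invisible to KASAN; reporting it by hand
 * before the access restores coverage.
 */
static inline u32 opaque_load32(u32 *p)
{
	u32 val;

	kasan_check_read(p, sizeof(*p));
	asm volatile ("ldr %w0, %1" : "=r" (val) : "Q" (*p) : "memory");
	return val;
}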

Cc: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
[will: consistently use *p as argument to sizeof]
Signed-off-by: Will Deacon <will.deacon@arm.com>
parent eea1bb22
arch/arm64/include/asm/barrier.h +15 −9
@@ -20,6 +20,8 @@
 
 #ifndef __ASSEMBLY__
 
+#include <linux/kasan-checks.h>
+
 #define __nops(n)	".rept	" #n "\nnop\n.endr\n"
 #define nops(n)		asm volatile(__nops(n))
 
@@ -72,31 +74,33 @@ static inline unsigned long array_index_mask_nospec(unsigned long idx,
 
 #define __smp_store_release(p, v)					\
 do {									\
+	typeof(p) __p = (p);						\
 	union { typeof(*p) __val; char __c[1]; } __u =			\
 		{ .__val = (__force typeof(*p)) (v) };			\
 	compiletime_assert_atomic_type(*p);				\
+	kasan_check_write(__p, sizeof(*p));				\
 	switch (sizeof(*p)) {						\
 	case 1:								\
 		asm volatile ("stlrb %w1, %0"				\
-				: "=Q" (*p)				\
+				: "=Q" (*__p)				\
 				: "r" (*(__u8 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 2:								\
 		asm volatile ("stlrh %w1, %0"				\
-				: "=Q" (*p)				\
+				: "=Q" (*__p)				\
 				: "r" (*(__u16 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 4:								\
 		asm volatile ("stlr %w1, %0"				\
-				: "=Q" (*p)				\
+				: "=Q" (*__p)				\
 				: "r" (*(__u32 *)__u.__c)		\
 				: "memory");				\
 		break;							\
 	case 8:								\
 		asm volatile ("stlr %1, %0"				\
-				: "=Q" (*p)				\
+				: "=Q" (*__p)				\
 				: "r" (*(__u64 *)__u.__c)		\
 				: "memory");				\
 		break;							\
@@ -106,27 +110,29 @@ do {									\
 #define __smp_load_acquire(p)						\
 ({									\
 	union { typeof(*p) __val; char __c[1]; } __u;			\
+	typeof(p) __p = (p);						\
 	compiletime_assert_atomic_type(*p);				\
+	kasan_check_read(__p, sizeof(*p));				\
 	switch (sizeof(*p)) {						\
 	case 1:								\
 		asm volatile ("ldarb %w0, %1"				\
 			: "=r" (*(__u8 *)__u.__c)			\
-			: "Q" (*p) : "memory");				\
+			: "Q" (*__p) : "memory");			\
 		break;							\
 	case 2:								\
 		asm volatile ("ldarh %w0, %1"				\
 			: "=r" (*(__u16 *)__u.__c)			\
-			: "Q" (*p) : "memory");				\
+			: "Q" (*__p) : "memory");			\
 		break;							\
 	case 4:								\
 		asm volatile ("ldar %w0, %1"				\
 			: "=r" (*(__u32 *)__u.__c)			\
-			: "Q" (*p) : "memory");				\
+			: "Q" (*__p) : "memory");			\
 		break;							\
 	case 8:								\
 		asm volatile ("ldar %0, %1"				\
 			: "=r" (*(__u64 *)__u.__c)			\
-			: "Q" (*p) : "memory");				\
+			: "Q" (*__p) : "memory");			\
 		break;							\
 	}								\
 	__u.__val;							\
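
For completeness, a hypothetical caller (invented for illustration, not from
the kernel tree) showing the class of bug the new checks expose: before this
patch the release store below compiled to uninstrumented inline asm and KASAN
stayed silent; with kasan_check_write() in place it reports an out-of-bounds
write.

#include <linux/slab.h>
#include <asm/barrier.h>

static void kasan_oob_demo(void)
{
	long *arr = kmalloc_array(4, sizeof(*arr), GFP_KERNEL);

	if (!arr)
		return;

	/* One element past the end: KASAN now flags this write. */
	smp_store_release(&arr[4], 1L);

	kfree(arr);
}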