Unverified commit cc99987c authored by Paul Burton
Browse files

MIPS: bitops: Abstract LL/SC loops



Introduce __bit_op() & __test_bit_op() macros which abstract away the
implementation of LL/SC loops. This cuts down on a lot of duplicate
boilerplate code, and also allows R10000_LLSC_WAR to be handled outside
of the individual bitop functions.

Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent aad028ca
Loading
Loading
Loading
Loading
+63 −204
Original line number Original line Diff line number Diff line
@@ -25,6 +25,41 @@
#include <asm/sgidefs.h>
#include <asm/sgidefs.h>
#include <asm/war.h>
#include <asm/war.h>


/*
 * __bit_op() - atomic read-modify-write of the word at @mem via an
 * LL/SC retry loop.
 *
 * LL loads *mem into %0 (temp); @insn is the modifying instruction and
 * may reference the extra @inputs operands starting at %2; SC attempts
 * the store and the loop branches back to the LL on SC failure.
 * NOTE(review): __SC_BEQZ presumably expands to the retry branch
 * appropriate for the configured ISA (including the R10000_LLSC_WAR
 * beqzl form, per the commit message) -- confirm against asm/llsc.h.
 */
#define __bit_op(mem, insn, inputs...) do {			\
	unsigned long temp;					\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"1:	" __LL		"%0, %1			\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%0, %1			\n"	\
	"	" __SC_BEQZ	"%0, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
	: inputs						\
	: __LLSC_CLOBBER);					\
} while (0)

/*
 * __test_bit_op() - atomic read-modify-write of the word at @mem via an
 * LL/SC retry loop, evaluating (as a GNU statement expression) to the
 * value the word held before the modification.
 *
 * @ll_dst selects the LL destination register as a string: "%0" to load
 * straight into orig (when @insn derives the new value into %1/temp from
 * it), or "%1" to load into temp (when @insn itself extracts the original
 * bit into %0, as the __EXT-based caller does).  SC always stores %1
 * (temp) and retries from the LL on failure.  Extra @inputs operands
 * start at %3.
 */
#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
	unsigned long orig, temp;				\
								\
	asm volatile(						\
	"	.set		push			\n"	\
	"	.set		" MIPS_ISA_LEVEL "	\n"	\
	"1:	" __LL		ll_dst ", %2		\n"	\
	"	" insn		"			\n"	\
	"	" __SC		"%1, %2			\n"	\
	"	" __SC_BEQZ	"%1, 1b			\n"	\
	"	.set		pop			\n"	\
	: "=&r"(orig), "=&r"(temp),				\
	  "+" GCC_OFF_SMALL_ASM()(mem)				\
	: inputs						\
	: __LLSC_CLOBBER);					\
								\
	orig;							\
})

/*
/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 * These functions call raw_local_irq_{save,restore}().
@@ -54,55 +89,20 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long temp;


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		__mips_set_bit(nr, addr);
		__mips_set_bit(nr, addr);
		return;
		return;
	}
	}


	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (BIT(bit)), GCC_OFF_SMALL_ASM() (*m)
		: __LLSC_CLOBBER);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "i" (bit), "r" (~0)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
		return;
		return;
	}
	}


	loongson_llsc_mb();
	loongson_llsc_mb();
	do {
	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	.set	pop				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);
	} while (unlikely(!temp));
}
}


/*
/*
@@ -119,55 +119,20 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long temp;


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		__mips_clear_bit(nr, addr);
		__mips_clear_bit(nr, addr);
		return;
		return;
	}
	}


	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(BIT(bit)))
		: __LLSC_CLOBBER);
		return;
	}

	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "i" (bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
		return;
		return;
	}
	}


	loongson_llsc_mb();
	loongson_llsc_mb();
	do {
	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	.set	pop				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(BIT(bit)))
		: __LLSC_CLOBBER);
	} while (unlikely(!temp));
}
}


/*
/*
@@ -197,41 +162,14 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long temp;


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		__mips_change_bit(nr, addr);
		__mips_change_bit(nr, addr);
		return;
		return;
	}
	}


	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	pop				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);
		return;
	}

	loongson_llsc_mb();
	loongson_llsc_mb();
	do {
	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	.set	pop				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);
	} while (unlikely(!temp));
}
}


/*
/*
@@ -247,41 +185,16 @@ static inline int test_and_set_bit_lock(unsigned long nr,
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long res, temp;
	unsigned long res, orig;


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		res = __mips_test_and_set_bit_lock(nr, addr);
		res = __mips_test_and_set_bit_lock(nr, addr);
	} else if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);

		res = res != 0;
	} else {
	} else {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		orig = __test_bit_op(*m, "%0",
			__asm__ __volatile__(
				     "or\t%1, %0, %3",
			"	.set	push				\n"
				     "ir"(BIT(bit)));
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		res = (orig & BIT(bit)) != 0;
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (BIT(bit))
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = (temp & BIT(bit)) != 0;
	}
	}


	smp_llsc_mb();
	smp_llsc_mb();
@@ -317,57 +230,25 @@ static inline int test_and_clear_bit(unsigned long nr,
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long res, temp;
	unsigned long res, orig;


	smp_mb__before_llsc();
	smp_mb__before_llsc();


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		res = __mips_test_and_clear_bit(nr, addr);
		res = __mips_test_and_clear_bit(nr, addr);
	} else if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);

		res = res != 0;
	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		res = __test_bit_op(*m, "%1",
			__asm__ __volatile__(
				    __EXT "%0, %1, %3, 1;"
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
				    __INS "%1, $0, %3, 1",
			"	" __EXT "%2, %0, %3, 1			\n"
				    "i"(bit));
			"	" __INS "%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "i" (bit)
			: __LLSC_CLOBBER);
		} while (unlikely(!temp));
	} else {
	} else {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		orig = __test_bit_op(*m, "%0",
			__asm__ __volatile__(
				     "or\t%1, %0, %3;"
			"	.set	push				\n"
				     "xor\t%1, %1, %3",
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
				     "ir"(BIT(bit)));
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
		res = (orig & BIT(bit)) != 0;
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (BIT(bit))
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = (temp & BIT(bit)) != 0;
	}
	}


	smp_llsc_mb();
	smp_llsc_mb();
@@ -388,43 +269,18 @@ static inline int test_and_change_bit(unsigned long nr,
{
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	int bit = nr & SZLONG_MASK;
	unsigned long res, temp;
	unsigned long res, orig;


	smp_mb__before_llsc();
	smp_mb__before_llsc();


	if (!kernel_uses_llsc) {
	if (!kernel_uses_llsc) {
		res = __mips_test_and_change_bit(nr, addr);
		res = __mips_test_and_change_bit(nr, addr);
	} else if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "ir" (BIT(bit))
		: __LLSC_CLOBBER);

		res = res != 0;
	} else {
	} else {
		loongson_llsc_mb();
		loongson_llsc_mb();
		do {
		orig = __test_bit_op(*m, "%0",
			__asm__ __volatile__(
				     "xor\t%1, %0, %3",
			"	.set	push				\n"
				     "ir"(BIT(bit)));
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		res = (orig & BIT(bit)) != 0;
			"	" __LL	"%0, %1 # test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"\t%2, %1			\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (BIT(bit))
			: __LLSC_CLOBBER);
		} while (unlikely(!res));

		res = (temp & BIT(bit)) != 0;
	}
	}


	smp_llsc_mb();
	smp_llsc_mb();
@@ -432,6 +288,9 @@ static inline int test_and_change_bit(unsigned long nr,
	return res;
	return res;
}
}


#undef __bit_op
#undef __test_bit_op

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/non-atomic.h>


/*
/*