Commit cceb0183 authored by Heiko Carstens's avatar Heiko Carstens Committed by Vasily Gorbik
Browse files

s390/alternatives: make use of asm_inline



This is the s390 version of commit 40576e5e ("x86: alternative.h:
use asm_inline for all alternative variants").

See commit eb111869 ("compiler-types.h: add asm_inline
definition") for more details.

With this change the compiler will no longer generate many out-of-line
versions of the three-instruction-sized arch_spin_unlock() function.
Due to this, gcc seems to change a lot of other inlining decisions,
which results in a net 6k text size growth according to
bloat-o-meter (gcc 9.2 with defconfig).
But that's still better than having many out-of-line versions of
arch_spin_unlock().

Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
parent 6a3035da
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -139,10 +139,10 @@ void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 * without volatile and memory clobber.
 */
#define alternative(oldinstr, altinstr, facility)			\
	asm volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
	asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")

#define alternative_2(oldinstr, altinstr1, facility1, altinstr2, facility2) \
	asm volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1,	    \
	asm_inline volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1,   \
				   altinstr2, facility2) ::: "memory")

#endif /* __ASSEMBLY__ */
+1 −1
Original line number Diff line number Diff line
@@ -85,7 +85,7 @@ static inline int arch_spin_trylock(arch_spinlock_t *lp)
static inline void arch_spin_unlock(arch_spinlock_t *lp)
{
	typecheck(int, lp->lock);
	asm volatile(
	asm_inline volatile(
		ALTERNATIVE("", ".long 0xb2fa0070", 49)	/* NIAI 7 */
		"	sth	%1,%0\n"
		: "=Q" (((unsigned short *) &lp->lock)[1])
+2 −2
Original line number Diff line number Diff line
@@ -74,7 +74,7 @@ static inline int arch_load_niai4(int *lock)
{
	int owner;

	asm volatile(
	asm_inline volatile(
		ALTERNATIVE("", ".long 0xb2fa0040", 49)	/* NIAI 4 */
		"	l	%0,%1\n"
		: "=d" (owner) : "Q" (*lock) : "memory");
@@ -85,7 +85,7 @@ static inline int arch_cmpxchg_niai8(int *lock, int old, int new)
{
	int expected = old;

	asm volatile(
	asm_inline volatile(
		ALTERNATIVE("", ".long 0xb2fa0080", 49)	/* NIAI 8 */
		"	cs	%0,%3,%1\n"
		: "=d" (old), "=Q" (*lock)