Commit 7c8fc35d authored by Will Deacon, committed by Ingo Molnar
Browse files

locking/atomics/arm64: Replace our atomic/lock bitop implementations with asm-generic



The <asm-generic/bitops/{atomic,lock}.h> implementations are built around
the atomic-fetch ops, which we implement efficiently for both LSE and
LL/SC systems. Use that instead of our hand-rolled, out-of-line bitops.S.

Signed-off-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-arm-kernel@lists.infradead.org
Cc: yamada.masahiro@socionext.com
Link: https://lore.kernel.org/lkml/1529412794-17720-9-git-send-email-will.deacon@arm.com


Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 84c65911
Loading
Loading
Loading
Loading
+2 −12
Original line number Diff line number Diff line
@@ -17,22 +17,11 @@
#define __ASM_BITOPS_H

#include <linux/compiler.h>
#include <asm/barrier.h>

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

/*
 * Little endian assembly atomic bitops.
 */
extern void set_bit(int nr, volatile unsigned long *p);
extern void clear_bit(int nr, volatile unsigned long *p);
extern void change_bit(int nr, volatile unsigned long *p);
extern int test_and_set_bit(int nr, volatile unsigned long *p);
extern int test_and_clear_bit(int nr, volatile unsigned long *p);
extern int test_and_change_bit(int nr, volatile unsigned long *p);

#include <asm-generic/bitops/builtin-__ffs.h>
#include <asm-generic/bitops/builtin-ffs.h>
#include <asm-generic/bitops/builtin-__fls.h>
@@ -44,8 +33,9 @@ extern int test_and_change_bit(int nr, volatile unsigned long *p);

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/atomic.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/le.h>

+1 −1
Original line number Diff line number Diff line
# SPDX-License-Identifier: GPL-2.0
lib-y		:= bitops.o clear_user.o delay.o copy_from_user.o	\
lib-y		:= clear_user.o delay.o copy_from_user.o		\
		   copy_to_user.o copy_in_user.o copy_page.o		\
		   clear_page.o memchr.o memcpy.o memmove.o memset.o	\
		   memcmp.o strcmp.o strncmp.o strlen.o strnlen.o	\

arch/arm64/lib/bitops.S

deleted (file mode 100644 → 0)
+0 −76
Original line number Diff line number Diff line
/*
 * Based on arch/arm/lib/bitops.h
 *
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/lse.h>

/*
 * x0: bits 5:0  bit offset
 *     bits 31:6 word offset
 * x1: address
 */
/*
 * bitop - emit an out-of-line atomic bitmap update routine.
 *
 * name: exported symbol to define (e.g. set_bit)
 * llsc: ALU op used on the LL/SC fallback path (orr/bic/eor)
 * lse:  ARMv8.1 LSE atomic store-op instruction (stset/stclr/steor)
 *
 * Per the register comment above: w0 = bit number, x1 = bitmap base.
 * No ordering is implied — plain ldxr/stxr (no acquire/release, no
 * barrier) on the LL/SC path, and the unordered st<op> form for LSE.
 */
	.macro	bitop, name, llsc, lse
ENTRY(	\name	)
	and	w3, w0, #63		// Get bit offset
	eor	w0, w0, w3		// Clear low bits
	mov	x2, #1
	add	x1, x1, x0, lsr #3	// Get word offset
alt_lse "	prfm	pstl1strm, [x1]",	"nop"
	lsl	x3, x2, x3		// Create mask

	// LL/SC: load-exclusive / modify / store-exclusive retry loop.
	// LSE:   the whole RMW collapses to a single st<op> instruction.
alt_lse	"1:	ldxr	x2, [x1]",		"\lse	x3, [x1]"
alt_lse	"	\llsc	x2, x2, x3",		"nop"
alt_lse	"	stxr	w0, x2, [x1]",		"nop"
alt_lse	"	cbnz	w0, 1b",		"nop"

	ret
ENDPROC(\name	)
	.endm

/*
 * testop - emit an out-of-line atomic test-and-modify bitmap routine.
 *
 * name: exported symbol to define (e.g. test_and_set_bit)
 * llsc: ALU op used on the LL/SC fallback path (orr/bic/eor)
 * lse:  ARMv8.1 LSE atomic load-op instruction (ldsetal/ldclral/ldeoral)
 *
 * Per the register comment above: w0 = bit number, x1 = bitmap base.
 * Returns the bit's previous value (0 or 1) in x0.  Fully ordered:
 * the LL/SC path pairs stlxr with a trailing dmb ish, and the LSE
 * path uses the acquire+release (-al) form of the instruction.
 */
	.macro	testop, name, llsc, lse
ENTRY(	\name	)
	and	w3, w0, #63		// Get bit offset
	eor	w0, w0, w3		// Clear low bits
	mov	x2, #1
	add	x1, x1, x0, lsr #3	// Get word offset
alt_lse "	prfm	pstl1strm, [x1]",	"nop"
	lsl	x4, x2, x3		// Create mask

	// LL/SC: exclusive retry loop; LSE: single ld<op>al that writes
	// the old word value into x2.
alt_lse	"1:	ldxr	x2, [x1]",		"\lse	x4, x2, [x1]"
	// Shift the old word so the tested bit lands in bit 0 of x0.
	lsr	x0, x2, x3
alt_lse	"	\llsc	x2, x2, x4",		"nop"
alt_lse	"	stlxr	w5, x2, [x1]",		"nop"
alt_lse	"	cbnz	w5, 1b",		"nop"
alt_lse	"	dmb	ish",			"nop"

	// Isolate the old bit value for the return.
	and	x0, x0, #1
	ret
ENDPROC(\name	)
	.endm

/*
 * Atomic bit operations.
 *
 * Instantiate the three plain (unordered) bitops and the three
 * test-and-modify (fully ordered, value-returning) variants.
 * Argument pairs are (LL/SC ALU op, LSE atomic instruction):
 * eor/steor toggles, bic/stclr clears, orr/stset sets the bit.
 */
	bitop	change_bit, eor, steor
	bitop	clear_bit, bic, stclr
	bitop	set_bit, orr, stset

	testop	test_and_change_bit, eor, ldeoral
	testop	test_and_clear_bit, bic, ldclral
	testop	test_and_set_bit, orr, ldsetal