Commit fedb8da9 authored by John David Anglin, committed by Helge Deller

parisc: Define mb() and add memory barriers to assembler unlock sequences

For years I thought all parisc machines executed loads and stores in
order. However, Jeff Law recently indicated on gcc-patches that this is
not correct. There are various degrees of out-of-order execution all the
way back to the PA7xxx processor series (hit-under-miss). The PA8xxx
series has full out-of-order execution for both integer operations and
loads and stores.

This is described in the following article:
http://web.archive.org/web/20040214092531/http://www.cpus.hp.com/technical_references/advperf.shtml

For this reason, we need to define mb() and insert a memory barrier
before the store that unlocks a spinlock. This ensures that all memory
accesses complete before the lock is released. The ldcw instruction
used to acquire the lock performs the same ordering function on entry.
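
As a side note for readers (not part of the commit message proper): the C-level unlock path follows the same pattern. A minimal sketch, assuming parisc's __ldcw_align() helper and arch_spinlock_t, and not a verbatim copy of the kernel's arch_spin_unlock():

	static inline void arch_spin_unlock(arch_spinlock_t *x)
	{
		volatile unsigned int *a = __ldcw_align(x);

		mb();	/* with this commit: "sync" on SMP builds */
		*a = 1;	/* releasing store, ordered after the critical section */
	}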

Signed-off-by: John David Anglin <dave.anglin@bell.net>
Cc: stable@vger.kernel.org # 4.0+
Signed-off-by: Helge Deller <deller@gmx.de>
parent 66509a27
arch/parisc/include/asm/barrier.h (new file) +32 −0
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

/* The synchronize caches instruction executes as a nop on systems in
   which all memory references are performed in order. */
#define synchronize_caches() __asm__ __volatile__ ("sync" : : : "memory")

#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */
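
For orientation (an assumption about the generic layer, not part of this diff): asm-generic/barrier.h derives smp_mb() from __smp_mb(), so on a CONFIG_SMP parisc build an ordinary barrier pairing now emits the sync instruction. A hypothetical usage sketch, with illustrative variable names:

	static int data, flag;

	void producer(void)
	{
		data = 42;
		smp_mb();	/* expands to "sync" on SMP parisc */
		flag = 1;
	}

	int consumer(void)
	{
		while (!flag)
			cpu_relax();	/* spin until the producer signals */
		smp_mb();		/* pairs with the producer's barrier */
		return data;
	}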
arch/parisc/kernel/entry.S +2 −0
@@ -481,6 +481,8 @@
	/* Release pa_tlb_lock lock without reloading lock address. */
	.macro		tlb_unlock0	spc,tmp
#ifdef CONFIG_SMP
	or,COND(=)	%r0,\spc,%r0
	sync
	or,COND(=)	%r0,\spc,%r0
	stw             \spc,0(\tmp)
#endif
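
Note on the sequence above: or,COND(=) %r0,\spc,%r0 nullifies the instruction that follows whenever \spc is zero, so both the new sync and the releasing store are skipped when the lock was never taken. A hedged C analogue, with hypothetical names for the macro arguments:

	static inline void tlb_unlock0(unsigned long spc, volatile unsigned int *lock)
	{
	#ifdef CONFIG_SMP
		if (spc) {			/* mirrors the two nullifying or,COND(=) */
			synchronize_caches();	/* the added "sync" */
			*lock = spc;		/* releasing store; any nonzero value frees the lock */
		}
	#endif
	}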
arch/parisc/kernel/pacache.S +1 −0
@@ -353,6 +353,7 @@ ENDPROC_CFI(flush_data_cache_local)
	.macro	tlb_unlock	la,flags,tmp
#ifdef CONFIG_SMP
	ldi		1,\tmp
	sync
	stw		\tmp,0(\la)
	mtsm		\flags
#endif
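
Here the sync sits between loading the free value and the store that releases the lock, and before mtsm restores the saved PSW. A rough C analogue (illustrative names; mtsm() stands in for the PSW-restore instruction, not a real C helper):

	static inline void tlb_unlock(volatile unsigned int *la, unsigned long flags)
	{
	#ifdef CONFIG_SMP
		synchronize_caches();	/* the added "sync": prior accesses complete */
		*la = 1;		/* free the lock */
		mtsm(flags);		/* restore PSW only after the release */
	#endif
	}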
arch/parisc/kernel/syscall.S +4 −0
@@ -633,6 +633,7 @@ cas_action:
	sub,<>	%r28, %r25, %r0
2:	stw,ma	%r24, 0(%r26)
	/* Free lock */
	sync
	stw,ma	%r20, 0(%sr2,%r20)
#if ENABLE_LWS_DEBUG
	/* Clear thread register indicator */
@@ -647,6 +648,7 @@ cas_action:
3:		
	/* Error occurred on load or store */
	/* Free lock */
	sync
	stw	%r20, 0(%sr2,%r20)
#if ENABLE_LWS_DEBUG
	stw	%r0, 4(%sr2,%r20)
@@ -848,6 +850,7 @@ cas2_action:

cas2_end:
	/* Free lock */
	sync
	stw,ma	%r20, 0(%sr2,%r20)
	/* Enable interrupts */
	ssm	PSW_SM_I, %r0
@@ -858,6 +861,7 @@ cas2_end:
22:
	/* Error occurred on load or store */
	/* Free lock */
	sync
	stw	%r20, 0(%sr2,%r20)
	ssm	PSW_SM_I, %r0
	ldo	1(%r0),%r28
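
Taken together, the four sync insertions give every exit from the LWS compare-and-swap paths a proper release: the lock is acquired with ldcw (which already orders the entry) and is now freed only after a sync. A rough C sketch of the pattern, using a hypothetical lock word (ldcw requires 16-byte alignment); this is not the kernel's actual C code:

	static unsigned int lws_lock __attribute__((aligned(16))) = 1;

	static int lws_cas(unsigned int *addr, unsigned int old, unsigned int new)
	{
		int ok;

		while (__ldcw(&lws_lock) == 0)	/* acquire: ldcw zeroes the word */
			;			/* spin while someone else holds it */

		ok = (*addr == old);
		if (ok)
			*addr = new;		/* the guarded store */

		synchronize_caches();		/* the added "sync" before freeing */
		lws_lock = 1;			/* free lock */
		return ok;
	}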