Commit 519be043 authored by Michael S. Tsirkin
Browse files

sparc: reuse asm-generic/barrier.h



On sparc 64 bit dma_rmb, dma_wmb, smp_store_mb, smp_mb, smp_rmb,
smp_wmb, read_barrier_depends and smp_read_barrier_depends match the
asm-generic variants exactly. Drop the local definitions and pull in
asm-generic/barrier.h instead.

nop() uses __asm__ __volatile__ but is otherwise identical to
the generic version; drop it as well.

This is in preparation to refactoring this code area.

Note: nop() was in processor.h and not in barrier.h as on other
architectures. Nothing seems to depend on it being there though.

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: David S. Miller <davem@davemloft.net>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
parent 21535aae
Loading
Loading
Loading
Loading
+0 −1
Original line number Diff line number Diff line
/*
 * sparc32 memory barriers.
 *
 * No sparc32-specific overrides are defined here: every barrier
 * primitive comes from the asm-generic implementation.
 */
#ifndef __SPARC_BARRIER_H
#define __SPARC_BARRIER_H

#include <asm/processor.h> /* for nop() */
#include <asm-generic/barrier.h>

#endif /* !(__SPARC_BARRIER_H) */
+2 −19
Original line number Diff line number Diff line
@@ -37,25 +37,6 @@ do { __asm__ __volatile__("ba,pt %%xcc, 1f\n\t" \
/*
 * rmb()/wmb() only emit a compiler barrier (no instruction):
 * hardware read/write ordering is assumed sufficient on sparc64 —
 * NOTE(review): presumably because the kernel runs the chip in TSO
 * mode; confirm against the mb() definition earlier in this file.
 */
#define rmb()	__asm__ __volatile__("":::"memory")
#define wmb()	__asm__ __volatile__("":::"memory")

/* DMA barriers need nothing stronger than the plain variants here. */
#define dma_rmb()	rmb()
#define dma_wmb()	wmb()

/* Store the value, then order that store against later loads. */
#define smp_store_mb(__var, __value) \
	do { WRITE_ONCE(__var, __value); membar_safe("#StoreLoad"); } while(0)

/* On UP builds the SMP barriers collapse to compiler-only barriers. */
#ifdef CONFIG_SMP
#define smp_mb()	mb()
#define smp_rmb()	rmb()
#define smp_wmb()	wmb()
#else
#define smp_mb()	__asm__ __volatile__("":::"memory")
#define smp_rmb()	__asm__ __volatile__("":::"memory")
#define smp_wmb()	__asm__ __volatile__("":::"memory")
#endif

/*
 * No-ops: dependent loads are treated as naturally ordered on this
 * architecture (a real barrier is only needed on e.g. Alpha) —
 * NOTE(review): matches the asm-generic defaults per the commit text.
 */
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
@@ -74,4 +55,6 @@ do { \
/*
 * Only compiler barriers around atomics: the atomic operations are
 * assumed to already provide the needed memory ordering on sparc64 —
 * NOTE(review): confirm against the arch atomic_*() implementations.
 */
#define smp_mb__before_atomic()	barrier()
#define smp_mb__after_atomic()	barrier()

/* Generic definitions fill in everything not overridden above. */
#include <asm-generic/barrier.h>

#endif /* !(__SPARC64_BARRIER_H) */
+0 −3
Original line number Diff line number Diff line
@@ -5,7 +5,4 @@
#else
#include <asm/processor_32.h>
#endif

/* Emit a single nop instruction; __volatile__ keeps the compiler
   from eliding or reordering it. */
#define nop() 		__asm__ __volatile__ ("nop")

#endif