From: Lennert Buytenhek
Date: Sat, 31 Mar 2007 11:03:20 +0000 (+0100)
Subject: [ARM] 4298/1: fix memory barriers for DMA coherent and SMP platforms
X-Git-Tag: firefly_0821_release~30447^2
X-Git-Url: http://demsky.eecs.uci.edu/git/?a=commitdiff_plain;h=398e692fd5cecdd25d311b47bbae69f7bac3a3cb;p=firefly-linux-kernel-4.4.55.git

[ARM] 4298/1: fix memory barriers for DMA coherent and SMP platforms

This patch:

- Switches mb/rmb/wmb back to being full-blown DMBs on ARM SMP systems,
  since mb/rmb/wmb are required to order Normal memory accesses as well.

- Enables the use of DMB and ISB on XSC3 (which is an ARMv5TE ISA core
  but conforms to the ARMv6 memory ordering model and supports the
  various ARMv6 barriers.)

- Makes DMA coherent platforms (only ixp23xx at the moment) map
  mb/rmb/wmb to dmb(), as on DMA coherent platforms, DMA consistent
  mappings are done as Normal mappings, which are weakly ordered.

Signed-off-by: Lennert Buytenhek
Acked-by: David Howells
Acked-by: Catalin Marinas
Acked-by: Paul E. McKenney
Acked-by: Dan Williams
Signed-off-by: Russell King
---

diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index f06d8a43fdee..69134c7518c1 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -3,6 +3,7 @@
 
 #ifdef __KERNEL__
 
+#include <asm/memory.h>
 
 #define CPU_ARCH_UNKNOWN	0
 #define CPU_ARCH_ARMv3		1
@@ -154,7 +155,7 @@ extern unsigned int user_debug;
 #define vectors_high()	(0)
 #endif
 
-#if __LINUX_ARM_ARCH__ >= 6
+#if defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ >= 6
 #define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
                                     : : "r" (0) : "memory")
 #define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
@@ -168,22 +169,23 @@ extern unsigned int user_debug;
 #define dmb() __asm__ __volatile__ ("" : : : "memory")
 #endif
 
-#define mb()	barrier()
-#define rmb()	barrier()
-#define wmb()	barrier()
-#define read_barrier_depends()	do { } while(0)
-
-#ifdef CONFIG_SMP
-#define smp_mb()	dmb()
-#define smp_rmb()	dmb()
-#define smp_wmb()	dmb()
-#define smp_read_barrier_depends()	read_barrier_depends()
+#ifndef CONFIG_SMP
+#define mb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define rmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define wmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #else
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
-#define smp_read_barrier_depends()	read_barrier_depends()
-#endif /* CONFIG_SMP */
+#define mb()		dmb()
+#define rmb()		dmb()
+#define wmb()		dmb()
+#define smp_mb()	dmb()
+#define smp_rmb()	dmb()
+#define smp_wmb()	dmb()
+#endif
+#define read_barrier_depends()		do { } while(0)
+#define smp_read_barrier_depends()	do { } while(0)
 
 #define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
 #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
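
Illustrative note (not part of the patch): the sketch below shows why the DMA coherent case needs wmb() to expand to a real dmb(). The descriptor layout and the names my_desc, MY_DESC_OWN and my_queue_buffer are hypothetical, chosen only to illustrate a driver handing a descriptor in a dma_alloc_coherent() mapping to its device.

#include <linux/types.h>	/* u32, dma_addr_t */
#include <asm/system.h>		/* wmb(), as redefined by this patch */

/* Hypothetical DMA descriptor living in a dma_alloc_coherent() mapping. */
struct my_desc {
	u32 addr;
	u32 len;
	u32 status;		/* device polls for MY_DESC_OWN here */
};

#define MY_DESC_OWN	0x80000000

static void my_queue_buffer(volatile struct my_desc *desc,
			    dma_addr_t buf, u32 len)
{
	desc->addr = buf;
	desc->len  = len;

	/*
	 * On a DMA coherent platform such as ixp23xx the coherent mapping
	 * is Normal (weakly ordered) memory, so the two stores above may
	 * be reordered past the ownership store below unless wmb() issues
	 * a real dmb(); a compiler-only barrier() cannot prevent that.
	 */
	wmb();

	desc->status = MY_DESC_OWN;
}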
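
Similarly, a minimal (hypothetical) producer/consumer sketch of the SMP case: with the smp_* barriers mapping to dmb() on SMP builds, Normal memory accesses made by two CPUs are ordered the way generic kernel code expects. The names publish/consume and the two variables are illustrative only.

#include <asm/system.h>		/* smp_wmb()/smp_rmb() -> dmb() on SMP */

static int shared_data;
static int data_ready;

/* CPU 0: publish the data, then raise the flag. */
static void publish(int value)
{
	shared_data = value;
	smp_wmb();		/* order the data store before the flag store */
	data_ready = 1;
}

/* CPU 1: observe the flag, then read the data. */
static int consume(void)
{
	if (!data_ready)
		return -1;
	smp_rmb();		/* pairs with the smp_wmb() in publish() */
	return shared_data;
}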