From 8015c5593acc3ed490d75c70ff67961b7e278e38 Mon Sep 17 00:00:00 2001
From: Dekel Peled
Date: Mon, 18 Mar 2019 14:58:13 +0200
Subject: [PATCH] eal/ppc: fix global memory barrier

From the description of the previous patch: "to improve performance
on PPC64, use light weight sync instruction instead of sync
instruction."

Excerpt from IBM doc [1], section "Memory barrier instructions":
"The second form of the sync instruction is light-weight sync, or
lwsync. This form is used to control ordering for storage accesses
to system memory only. It does not create a memory barrier for
accesses to device memory."

This patch removes the use of lwsync, so calls to rte_wmb() and
rte_rmb() provide the correct memory barrier, ensuring the order of
accesses to both system memory and device memory.

[1] https://www.ibm.com/developerworks/systems/articles/powerpc.html

Fixes: d23a6bd04d72 ("eal/ppc: fix memory barrier for IBM POWER")
Cc: stable@dpdk.org

Signed-off-by: Dekel Peled
---
 lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
index ce38350bdc..797381c0f5 100644
--- a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
@@ -63,11 +63,7 @@ extern "C" {
  * Guarantees that the STORE operations generated before the barrier
  * occur before the STORE operations generated after.
  */
-#ifdef RTE_ARCH_64
-#define rte_wmb() asm volatile("lwsync" : : : "memory")
-#else
 #define rte_wmb() asm volatile("sync" : : : "memory")
-#endif
 
 /**
  * Read memory barrier.
@@ -75,11 +71,7 @@ extern "C" {
  * Guarantees that the LOAD operations generated before the barrier
  * occur before the LOAD operations generated after.
  */
-#ifdef RTE_ARCH_64
-#define rte_rmb() asm volatile("lwsync" : : : "memory")
-#else
 #define rte_rmb() asm volatile("sync" : : : "memory")
-#endif
 
 #define rte_smp_mb() rte_mb()
 
--
2.20.1
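
Note (illustration, not part of the patch): the kind of code that relies on
rte_wmb() ordering device-memory accesses is a driver posting a descriptor in
DMA-visible system memory and then ringing an MMIO doorbell. The sketch below
uses hypothetical names (tx_desc, tx_post, doorbell); only rte_wmb() is real
DPDK API. With lwsync the doorbell store could be reordered ahead of the
descriptor stores from the device's point of view; the full "sync" restored by
this patch keeps the intended order.

#include <stdint.h>
#include <rte_atomic.h>

/* Hypothetical descriptor layout, for illustration only. */
struct tx_desc {
	uint64_t buf_addr;
	uint32_t len;
	uint32_t flags;
};

static inline void
tx_post(struct tx_desc *desc, uint64_t buf, uint32_t len,
	volatile uint32_t *doorbell, uint32_t tail)
{
	/* 1. Fill the descriptor in DMA-visible system memory. */
	desc->buf_addr = buf;
	desc->len = len;
	desc->flags = 1;

	/*
	 * 2. Write barrier: the descriptor stores above must be visible
	 *    before the doorbell store below. On PPC64 this now expands
	 *    to "sync", which also orders the device-memory access.
	 */
	rte_wmb();

	/* 3. Ring the doorbell: a store to device (MMIO) memory. */
	*doorbell = tail;
}

In a real driver the MMIO store would more likely go through rte_write32()
from rte_io.h, which carries its own I/O barrier; the plain volatile store
above just keeps the ordering requirement explicit for this example.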