From 257515a50057fa97605cc13e5b3b9cc9f964c299 Mon Sep 17 00:00:00 2001
From: Gowrishankar Muthukrishnan
Date: Tue, 30 Jan 2018 16:23:18 +0530
Subject: [PATCH] eal/ppc: remove the braces in memory barrier macros

Calling the rte_smp_{w/r}mb macros expands into a compound block,
"{ ... }". If the call site is already terminated with ";", that extra
";" after the block ends the enclosing if statement, so an else clause
following the call fails to compile, as in the code below:

	if (condition)
		rte_smp_rmb();
	else
		...

This patch removes the braces from these macros so that each one
expands into a single statement and the else clause compiles.

Fixes: d23a6bd04d ("eal/ppc: fix memory barrier for IBM POWER")
Fixes: 05c3fd7110 ("eal/ppc: atomic operations for IBM Power")
Cc: stable@dpdk.org

Signed-off-by: Gowrishankar Muthukrishnan
---
 lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
index f38618f903..39fce7b930 100644
--- a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
@@ -64,9 +64,9 @@ extern "C" {
  * occur before the STORE operations generated after.
  */
 #ifdef RTE_ARCH_64
-#define rte_wmb() {asm volatile("lwsync" : : : "memory"); }
+#define rte_wmb() asm volatile("lwsync" : : : "memory")
 #else
-#define rte_wmb() {asm volatile("sync" : : : "memory"); }
+#define rte_wmb() asm volatile("sync" : : : "memory")
 #endif
 
 /**
@@ -76,9 +76,9 @@ extern "C" {
  * occur before the LOAD operations generated after.
  */
 #ifdef RTE_ARCH_64
-#define rte_rmb() {asm volatile("lwsync" : : : "memory"); }
+#define rte_rmb() asm volatile("lwsync" : : : "memory")
 #else
-#define rte_rmb() {asm volatile("sync" : : : "memory"); }
+#define rte_rmb() asm volatile("sync" : : : "memory")
 #endif
 
 #define rte_smp_mb() rte_mb()
--
2.20.1