Calling one of the rte_smp_{w/r}mb macros expands into a compound block,
which breaks compilation of an else clause that follows the call once the
call site has been terminated with ";", as in the code below.
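(The call site shown here is illustrative; the exact code from the
original report is not reproduced.)

	if (cond)
		rte_smp_rmb();
	else
		handle_other_case();

The macro currently expands to "{ asm volatile(...); }", so the ";"
written after the call becomes an empty statement that terminates the
if, leaving the else without a matching if.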
This patch removes the { } around these macros so that each expands to a
single statement and a following else clause compiles again.
Fixes: d23a6bd04d ("eal/ppc: fix memory barrier for IBM POWER")
Fixes: 05c3fd7110 ("eal/ppc: atomic operations for IBM Power")
Cc: stable@dpdk.org
Signed-off-by: Gowrishankar Muthukrishnan <gowrishankar.m@linux.vnet.ibm.com>
* occur before the STORE operations generated after.
*/
#ifdef RTE_ARCH_64
-#define rte_wmb() {asm volatile("lwsync" : : : "memory"); }
+#define rte_wmb() asm volatile("lwsync" : : : "memory")
#else
-#define rte_wmb() {asm volatile("sync" : : : "memory"); }
+#define rte_wmb() asm volatile("sync" : : : "memory")
#endif
/**
* occur before the LOAD operations generated after.
*/
#ifdef RTE_ARCH_64
-#define rte_rmb() {asm volatile("lwsync" : : : "memory"); }
+#define rte_rmb() asm volatile("lwsync" : : : "memory")
#else
-#define rte_rmb() {asm volatile("sync" : : : "memory"); }
+#define rte_rmb() asm volatile("sync" : : : "memory")
#endif
#define rte_smp_mb() rte_mb()