From d6772940c09e156d3bdb6e2dbefbe95bf794a89a Mon Sep 17 00:00:00 2001 From: Intel Date: Mon, 3 Jun 2013 00:00:00 +0000 Subject: [PATCH] eal: memory barriers use intrinsic functions Signed-off-by: Intel --- lib/librte_eal/common/include/rte_atomic.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/librte_eal/common/include/rte_atomic.h b/lib/librte_eal/common/include/rte_atomic.h index 67eff12363..069ce46737 100644 --- a/lib/librte_eal/common/include/rte_atomic.h +++ b/lib/librte_eal/common/include/rte_atomic.h @@ -64,7 +64,7 @@ extern "C" { * Guarantees that the LOAD and STORE operations generated before the * barrier occur before the LOAD and STORE operations generated after. */ -#define rte_mb() asm volatile("mfence;" : : : "memory") +#define rte_mb() _mm_mfence() /** * Write memory barrier. @@ -72,7 +72,7 @@ extern "C" { * Guarantees that the STORE operations generated before the barrier * occur before the STORE operations generated after. */ -#define rte_wmb() asm volatile("sfence;" : : : "memory") +#define rte_wmb() _mm_sfence() /** * Read memory barrier. @@ -80,7 +80,7 @@ extern "C" { * Guarantees that the LOAD operations generated before the barrier * occur before the LOAD operations generated after. */ -#define rte_rmb() asm volatile("lfence;" : : : "memory") +#define rte_rmb() _mm_lfence() #include <emmintrin.h> -- 2.20.1