From: Intel
Date: Mon, 3 Jun 2013 00:00:00 +0000 (+0000)
Subject: eal: memory barriers use intrinsic functions
X-Git-Tag: spdx-start~11261
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=d6772940c09e156d3bdb6e2dbefbe95bf794a89a;p=dpdk.git

eal: memory barriers use intrinsic functions

Signed-off-by: Intel
---

diff --git a/lib/librte_eal/common/include/rte_atomic.h b/lib/librte_eal/common/include/rte_atomic.h
index 67eff12363..069ce46737 100644
--- a/lib/librte_eal/common/include/rte_atomic.h
+++ b/lib/librte_eal/common/include/rte_atomic.h
@@ -64,7 +64,7 @@ extern "C" {
  * Guarantees that the LOAD and STORE operations generated before the
  * barrier occur before the LOAD and STORE operations generated after.
  */
-#define rte_mb() asm volatile("mfence;" : : : "memory")
+#define rte_mb() _mm_mfence()
 
 /**
  * Write memory barrier.
@@ -72,7 +72,7 @@ extern "C" {
  * Guarantees that the STORE operations generated before the barrier
  * occur before the STORE operations generated after.
  */
-#define rte_wmb() asm volatile("sfence;" : : : "memory")
+#define rte_wmb() _mm_sfence()
 
 /**
  * Read memory barrier.
@@ -80,7 +80,7 @@ extern "C" {
  * Guarantees that the LOAD operations generated before the barrier
  * occur before the LOAD operations generated after.
  */
-#define rte_rmb() asm volatile("lfence;" : : : "memory")
+#define rte_rmb() _mm_lfence()
 
 #include
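
The documented semantics of the barriers are unchanged by this patch; only the
implementation moves from inline asm (mfence/sfence/lfence) to the corresponding
_mm_mfence()/_mm_sfence()/_mm_lfence() compiler intrinsics. As a minimal sketch of
how callers typically pair rte_wmb() and rte_rmb(), the following single-producer/
single-consumer handoff is illustrative only; the struct, the function names and
the "ready" flag protocol are assumptions for the example, not code from this patch.

/* Illustrative sketch (not part of this commit): publish a payload with
 * rte_wmb(), consume it behind rte_rmb().
 */
#include <rte_atomic.h>

struct msg {
	int payload;
	volatile int ready;    /* set by producer, polled by consumer */
};

static void
producer(struct msg *m, int value)
{
	m->payload = value;    /* write the data                           */
	rte_wmb();             /* store to payload is ordered before the   */
	                       /* store to ready                           */
	m->ready = 1;          /* publish                                  */
}

static int
consumer(struct msg *m)
{
	while (!m->ready)      /* wait for the flag                        */
		;
	rte_rmb();             /* load of ready is ordered before the      */
	                       /* load of payload                          */
	return m->payload;     /* safe to read the data                    */
}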