*/
#define rte_rmb() _mm_lfence()
+/**
+ * Compiler barrier.
+ *
+ * Guarantees that operation reordering does not occur at compile time
+ * for operations directly before and after the barrier. No fence
+ * instruction is emitted, so ordering at the CPU level is unaffected.
+ */
+#define rte_compiler_barrier() do { \
+ asm volatile ("" : : : "memory"); \
+} while(0)
+
#include <emmintrin.h>
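
For reference, the empty asm statement with a "memory" clobber tells the
compiler that memory may have changed, so it can neither cache memory
contents in registers across the statement nor move loads and stores past
it. A minimal sketch of the publish pattern this enables (buf, len and
ready are illustrative names, not part of this patch):

#include <string.h>

static char buf[64];
static int len;
static volatile int ready;

static void publish(const char *src, int n)
{
	memcpy(buf, src, n);     /* 1: write the payload              */
	len = n;
	rte_compiler_barrier();  /* 2: compiler cannot sink 1 below 3 */
	ready = 1;               /* 3: signal the reader              */
}

On IA the CPU already keeps stores in program order, so a compiler-only
barrier is sufficient between the data writes and the flag write; no
sfence/mfence instruction is needed for this case.
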
/**
* about compiler re-ordering */
};
-/* dummy assembly operation to prevent compiler re-ordering of instructions */
-#define COMPILER_BARRIER() do { asm volatile("" ::: "memory"); } while(0)
-
#define RING_F_SP_ENQ 0x0001 /**< The default enqueue is "single-producer". */
#define RING_F_SC_DEQ 0x0002 /**< The default dequeue is "single-consumer". */
#define RTE_RING_QUOT_EXCEED (1 << 31) /**< Quota exceed for burst ops */
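
A usage sketch for these flags and the watermark, assuming the ring API of
this DPDK generation: rte_ring_create(), rte_ring_set_water_mark(), and a
bulk enqueue that returns 0, -EDQUOT or -ENOBUFS (the burst variants
instead report a watermark crossing via the RTE_RING_QUOT_EXCEED bit in
their return value):

#include <errno.h>
#include <rte_ring.h>

static int make_and_fill(void **objs, unsigned n)
{
	struct rte_ring *r;
	int ret;

	/* Single-producer enqueue, single-consumer dequeue. */
	r = rte_ring_create("sketch", 1024, SOCKET_ID_ANY,
			    RING_F_SP_ENQ | RING_F_SC_DEQ);
	if (r == NULL)
		return -1;
	rte_ring_set_water_mark(r, 768);

	ret = rte_ring_enqueue_bulk(r, objs, n);
	if (ret == -EDQUOT)
		ret = 0;	/* enqueued, but now above the watermark */
	return ret;		/* 0 on success, -ENOBUFS if no room */
}
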
/* write entries in ring */
ENQUEUE_PTRS();
- COMPILER_BARRIER();
+ rte_compiler_barrier();
/* if we exceed the watermark */
if (unlikely(((mask + 1) - free_entries + n) > r->prod.watermark)) {
/* write entries in ring */
ENQUEUE_PTRS();
- COMPILER_BARRIER();
+ rte_compiler_barrier();
/* if we exceed the watermark */
if (unlikely(((mask + 1) - free_entries + n) > r->prod.watermark)) {
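
Both enqueue hunks follow the same discipline: ENQUEUE_PTRS() stores the
object pointers into ring[], the compiler barrier keeps those stores from
being moved past the subsequent prod.tail update, and (mask + 1) -
free_entries + n is simply the ring's occupancy after this enqueue, which
is what gets tested against prod.watermark. A self-contained sketch of
that ordering discipline (tiny_ring and all names below are hypothetical,
not the rte_ring code):

#define SLOTS 8				/* power of two, as rte_ring requires */
#define MASK  (SLOTS - 1)

struct tiny_ring {
	volatile unsigned head;		/* consumer position */
	volatile unsigned tail;		/* producer position */
	void *slot[SLOTS];
};

static int tiny_enqueue(struct tiny_ring *r, void *obj)
{
	unsigned t = r->tail;

	if (t - r->head == SLOTS)
		return -1;		/* full */
	r->slot[t & MASK] = obj;	/* write the entry ...             */
	rte_compiler_barrier();		/* ... before the consumer can see */
	r->tail = t + 1;		/*     the advanced tail           */
	return 0;
}

Because IA does not reorder a store with an earlier store, the compiler
barrier alone guarantees the entry is visible before the new tail.
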
/* copy in table */
DEQUEUE_PTRS();
- COMPILER_BARRIER();
+ rte_compiler_barrier();
/*
* If there are other dequeues in progress that preceded us,
/* copy in table */
DEQUEUE_PTRS();
- COMPILER_BARRIER();
+ rte_compiler_barrier();
__RING_STAT_ADD(r, deq_success, n);
r->cons.tail = cons_next;
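
The dequeue hunks are the mirror image: DEQUEUE_PTRS() loads the entries,
and the barrier keeps those loads from being delayed past the cons.tail
store that hands the slots back to producers. Continuing the tiny_ring
sketch above (hypothetical names again):

static int tiny_dequeue(struct tiny_ring *r, void **obj)
{
	unsigned h = r->head;

	if (h == r->tail)
		return -1;		/* empty */
	*obj = r->slot[h & MASK];	/* read the entry ...          */
	rte_compiler_barrier();		/* ... before the producer may */
	r->head = h + 1;		/*     reuse the slot          */
	return 0;
}

IA does not reorder a load with a later store either, so once more the
compiler barrier is all that is required.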