1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
5 #ifndef _RTE_ATOMIC_X86_H_
6 #define _RTE_ATOMIC_X86_H_
13 #include <rte_common.h>
14 #include <rte_config.h>
15 #include <emmintrin.h>
16 #include "generic/rte_atomic.h"
/*
 * The "lock" instruction prefix makes x86 read-modify-write instructions
 * atomic across CPUs. It is unnecessary (and costly) when only a single
 * lcore can ever execute, so it is compiled out in that configuration.
 */
#if RTE_MAX_LCORE == 1
#define MPLOCKED                        /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
#endif
/** General memory barrier: orders both prior loads and stores (mfence). */
#define rte_mb() _mm_mfence()

/** Write (store) memory barrier (sfence). */
#define rte_wmb() _mm_sfence()

/** Read (load) memory barrier (lfence). */
#define rte_rmb() _mm_lfence()

/*
 * SMP barriers. x86 implements a strong memory model (TSO): stores are
 * not reordered with other stores and loads are not reordered with other
 * loads, so a compiler barrier suffices for the one-sided variants; only
 * the full barrier needs a real fence.
 */
#define rte_smp_mb() rte_mb()

#define rte_smp_wmb() rte_compiler_barrier()

#define rte_smp_rmb() rte_compiler_barrier()

/* I/O barriers map onto the same primitives on x86. */
#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_compiler_barrier()

#define rte_io_rmb() rte_compiler_barrier()

/* Coherent-I/O (DMA) barriers. */
#define rte_cio_wmb() rte_compiler_barrier()

#define rte_cio_rmb() rte_compiler_barrier()
46 /*------------------------- 16 bit atomic operations -------------------------*/
48 #ifndef RTE_FORCE_INTRINSICS
50 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
56 "cmpxchgw %[src], %[dst];"
58 : [res] "=a" (res), /* output */
60 : [src] "r" (src), /* input */
63 : "memory"); /* no-clobber list */
67 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
69 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
73 rte_atomic16_inc(rte_atomic16_t *v)
78 : [cnt] "=m" (v->cnt) /* output */
79 : "m" (v->cnt) /* input */
84 rte_atomic16_dec(rte_atomic16_t *v)
89 : [cnt] "=m" (v->cnt) /* output */
90 : "m" (v->cnt) /* input */
94 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
102 : [cnt] "+m" (v->cnt), /* output */
108 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
112 asm volatile(MPLOCKED
115 : [cnt] "+m" (v->cnt), /* output */
121 /*------------------------- 32 bit atomic operations -------------------------*/
124 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
130 "cmpxchgl %[src], %[dst];"
132 : [res] "=a" (res), /* output */
134 : [src] "r" (src), /* input */
137 : "memory"); /* no-clobber list */
141 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
143 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
147 rte_atomic32_inc(rte_atomic32_t *v)
152 : [cnt] "=m" (v->cnt) /* output */
153 : "m" (v->cnt) /* input */
158 rte_atomic32_dec(rte_atomic32_t *v)
163 : [cnt] "=m" (v->cnt) /* output */
164 : "m" (v->cnt) /* input */
168 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
176 : [cnt] "+m" (v->cnt), /* output */
182 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
186 asm volatile(MPLOCKED
189 : [cnt] "+m" (v->cnt), /* output */
197 #include "rte_atomic_32.h"
199 #include "rte_atomic_64.h"
206 #endif /* _RTE_ATOMIC_X86_H_ */