1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation.
6 * Inspired from FreeBSD src/sys/i386/include/atomic.h
7 * Copyright (c) 1998 Doug Rabson
11 #ifndef _RTE_ATOMIC_X86_H_
12 #error do not include this file directly, use <rte_atomic.h> instead
15 #ifndef _RTE_ATOMIC_I686_H_
16 #define _RTE_ATOMIC_I686_H_
19 #include <rte_common.h>
21 /*------------------------- 64 bit atomic operations -------------------------*/
23 #ifndef RTE_FORCE_INTRINSICS
/*
 * Atomic 64-bit compare-and-set for i686, which has no native 64-bit
 * cmpxchg.  The register layout — expected value in edx:eax, new value
 * in ecx:ebx, destination pointer in esi — matches the cmpxchg8b
 * calling convention; the flag result comes back in eax.
 * NOTE(review): this excerpt is line-sampled.  The declaration of the
 * result variable, the union splitting exp/src into l32/h32 halves,
 * the asm mnemonics themselves, and the final return are not visible
 * here — confirm against the full file.
 */
25 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
/* Non-PIC variant: %ebx is free, so the low half of src can be bound
 * to it directly with the "b" constraint. */
45 : [res] "=a" (res) /* result in eax */
46 : [dst] "S" (dst), /* esi */
47 "b" (_src.l32), /* ebx */
48 "c" (_src.h32), /* ecx */
49 "a" (_exp.l32), /* eax */
50 "d" (_exp.h32) /* edx */
51 : "memory" ); /* clobber list */
/* PIC variant: %ebx holds the GOT pointer under -fPIC and must not be
 * clobbered by a constraint, so the low half of src travels in %edi
 * ("D") and is swapped into %ebx around the compare-exchange via the
 * xchgl pair (the matching xchgl before it is among the unsampled
 * lines — presumably symmetric; verify in the full source). */
54 "xchgl %%ebx, %%edi;\n"
58 "xchgl %%ebx, %%edi;\n"
59 : [res] "=a" (res) /* result in eax */
60 : [dst] "S" (dst), /* esi */
61 "D" (_src.l32), /* edi, swapped into ebx above */
62 "c" (_src.h32), /* ecx */
63 "a" (_exp.l32), /* eax */
64 "d" (_exp.h32) /* edx */
65 : "memory" ); /* clobber list */
/*
 * Atomically replace *dest with val and return the previous value.
 * Built as a cmpset retry loop: re-read *dest and retry until the
 * compare-and-set lands on an unchanged value.
 * NOTE(review): lines between the signature and the loop tail are not
 * visible in this excerpt (presumably the declaration of `old`, the
 * `do { old = *dest; }` body, and a trailing `return old;`) — confirm
 * against the full file.
 */
71 static inline uint64_t
72 rte_atomic64_exchange(volatile uint64_t *dest, uint64_t val)
78 } while (rte_atomic64_cmpset(dest, old, val) == 0);
/*
 * Initialize the 64-bit counter via a cmpset retry loop — a plain
 * 64-bit store is not atomic on i686, so even initialization goes
 * through cmpxchg-based cmpset on &v->cnt.
 * NOTE(review): the cmpset arguments (expected/new values — presumably
 * current value and 0) are on unsampled lines; confirm in full file.
 */
84 rte_atomic64_init(rte_atomic64_t *v)
89 while (success == 0) {
91 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/*
 * Atomically read the 64-bit counter.  A plain 64-bit load can tear on
 * i686, so the value is read by cmpset-ing it with itself: a successful
 * compare-exchange proves a consistent snapshot was observed.
 */
97 rte_atomic64_read(rte_atomic64_t *v)
102 while (success == 0) {
104 /* replace the value by itself */
105 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/*
 * Atomically store new_value into the 64-bit counter.  Uses a cmpset
 * retry loop because a plain 64-bit store is not atomic on i686.
 */
112 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
117 while (success == 0) {
119 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/*
 * Atomically add inc to the 64-bit counter.  Read-modify-write is
 * emulated with a cmpset retry loop (no 64-bit lock xadd on i686).
 */
125 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
130 while (success == 0) {
132 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/*
 * Atomically subtract dec from the 64-bit counter, mirroring
 * rte_atomic64_add: cmpset retry loop until the update lands.
 */
138 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
143 while (success == 0) {
145 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/* Atomically increment the counter by one (thin wrapper over add). */
151 rte_atomic64_inc(rte_atomic64_t *v)
153 rte_atomic64_add(v, 1);
/* Atomically decrement the counter by one (thin wrapper over sub). */
157 rte_atomic64_dec(rte_atomic64_t *v)
159 rte_atomic64_sub(v, 1);
/*
 * Atomically add inc to the counter and return the post-add value.
 * Same cmpset retry loop as rte_atomic64_add, but the updated value is
 * also handed back to the caller.
 * NOTE(review): the computation of the new value and the return
 * statement are on lines not visible in this excerpt — confirm against
 * the full file.
 */
162 static inline int64_t
163 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
168 while (success == 0) {
170 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/*
 * Atomically subtract dec from the counter and return the post-sub
 * value; mirrors rte_atomic64_add_return.
 * NOTE(review): the new-value computation and return statement are on
 * unsampled lines — confirm against the full file.
 */
177 static inline int64_t
178 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
183 while (success == 0) {
185 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
/* Atomically increment; return non-zero iff the result is zero. */
192 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
194 return rte_atomic64_add_return(v, 1) == 0;
/* Atomically decrement; return non-zero iff the result is zero. */
197 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
199 return rte_atomic64_sub_return(v, 1) == 0;
/*
 * Atomically set the counter to 1 if and only if it is currently 0.
 * Returns cmpset's result: non-zero when this caller performed the
 * 0 -> 1 transition, 0 when the counter was already non-zero.
 */
202 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
204 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
/* Atomically reset the counter to 0 (wrapper over rte_atomic64_set). */
207 static inline void rte_atomic64_clear(rte_atomic64_t *v)
209 rte_atomic64_set(v, 0);
213 #endif /* _RTE_ATOMIC_I686_H_ */