f63b7fa275301654ab37ad1e763583e451738e0f
[dpdk.git] / lib / librte_eal / common / include / arch / x86 / rte_atomic_32.h
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2010-2014 Intel Corporation.
3  */
4
5 /*
6  * Inspired from FreeBSD src/sys/i386/include/atomic.h
7  * Copyright (c) 1998 Doug Rabson
8  * All rights reserved.
9  */
10
11 #ifndef _RTE_ATOMIC_X86_H_
12 #error do not include this file directly, use <rte_atomic.h> instead
13 #endif
14
15 #ifndef _RTE_ATOMIC_I686_H_
16 #define _RTE_ATOMIC_I686_H_
17
18 #include <stdint.h>
19 #include <rte_common.h>
20 #include <rte_atomic.h>
21
22 /*------------------------- 64 bit atomic operations -------------------------*/
23
24 #ifndef RTE_FORCE_INTRINSICS
/*
 * Atomic 64-bit compare-and-set using the cmpxchg8b instruction.
 *
 * If *dst equals exp, atomically store src into *dst.
 * Returns non-zero on success (the store happened), 0 otherwise.
 *
 * cmpxchg8b compares *dst against edx:eax and, on match, writes
 * ecx:ebx into *dst; the anonymous-struct union below splits the
 * 64-bit exp/src arguments into the required 32-bit halves.
 * setz captures ZF (1 = swap happened) into the result byte.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        uint8_t res;
        RTE_STD_C11
        union {
                struct {
                        uint32_t l32;
                        uint32_t h32;
                };
                uint64_t u64;
        } _exp, _src;

        _exp.u64 = exp;
        _src.u64 = src;

#ifndef __PIC__
    asm volatile (
            MPLOCKED
            "cmpxchg8b (%[dst]);"
            "setz %[res];"
            : [res] "=a" (res)      /* result in eax */
            : [dst] "S" (dst),      /* esi */
             "b" (_src.l32),       /* ebx */
             "c" (_src.h32),       /* ecx */
             "a" (_exp.l32),       /* eax */
             "d" (_exp.h32)        /* edx */
                        : "memory" );           /* no-clobber list */
#else
        /*
         * Under PIC, ebx is reserved for the GOT pointer, so it cannot
         * be an asm input.  The low half of src travels in edi ("D")
         * and is swapped into ebx only for the duration of cmpxchg8b;
         * the second xchgl restores ebx before the asm exits.
         */
        asm volatile (
            "xchgl %%ebx, %%edi;\n"
                        MPLOCKED
                        "cmpxchg8b (%[dst]);"
                        "setz %[res];"
            "xchgl %%ebx, %%edi;\n"
                        : [res] "=a" (res)      /* result in eax */
                        : [dst] "S" (dst),      /* esi */
                          "D" (_src.l32),       /* ebx */
                          "c" (_src.h32),       /* ecx */
                          "a" (_exp.l32),       /* eax */
                          "d" (_exp.h32)        /* edx */
                        : "memory" );           /* no-clobber list */
#endif

        return res;
}
71
/*
 * Atomically replace *dest with val and return the previous value.
 * Implemented as a compare-and-set retry loop: re-read on every
 * failed attempt until the swap lands.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dest, uint64_t val)
{
	uint64_t prev = *dest;

	while (rte_atomic64_cmpset(dest, prev, val) == 0)
		prev = *dest;

	return prev;
}
83
84 static inline void
85 rte_atomic64_init(rte_atomic64_t *v)
86 {
87         int success = 0;
88         uint64_t tmp;
89
90         while (success == 0) {
91                 tmp = v->cnt;
92                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
93                                               tmp, 0);
94         }
95 }
96
97 static inline int64_t
98 rte_atomic64_read(rte_atomic64_t *v)
99 {
100         int success = 0;
101         uint64_t tmp;
102
103         while (success == 0) {
104                 tmp = v->cnt;
105                 /* replace the value by itself */
106                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
107                                               tmp, tmp);
108         }
109         return tmp;
110 }
111
112 static inline void
113 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
114 {
115         int success = 0;
116         uint64_t tmp;
117
118         while (success == 0) {
119                 tmp = v->cnt;
120                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
121                                               tmp, new_value);
122         }
123 }
124
125 static inline void
126 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
127 {
128         int success = 0;
129         uint64_t tmp;
130
131         while (success == 0) {
132                 tmp = v->cnt;
133                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
134                                               tmp, tmp + inc);
135         }
136 }
137
138 static inline void
139 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
140 {
141         int success = 0;
142         uint64_t tmp;
143
144         while (success == 0) {
145                 tmp = v->cnt;
146                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
147                                               tmp, tmp - dec);
148         }
149 }
150
/* Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
156
/* Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
162
163 static inline int64_t
164 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
165 {
166         int success = 0;
167         uint64_t tmp;
168
169         while (success == 0) {
170                 tmp = v->cnt;
171                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
172                                               tmp, tmp + inc);
173         }
174
175         return tmp + inc;
176 }
177
178 static inline int64_t
179 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
180 {
181         int success = 0;
182         uint64_t tmp;
183
184         while (success == 0) {
185                 tmp = v->cnt;
186                 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
187                                               tmp, tmp - dec);
188         }
189
190         return tmp - dec;
191 }
192
/* Atomically increment; return non-zero iff the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
197
/* Atomically decrement; return non-zero iff the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
202
/*
 * Atomically set the counter to 1 if it is currently 0.
 * Returns non-zero on success (counter was 0 and is now 1).
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
207
/* Atomically reset the counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
212 #endif
213
214 #endif /* _RTE_ATOMIC_I686_H_ */