4 * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 * Inspired from FreeBSD src/sys/i386/include/atomic.h
36 * Copyright (c) 1998 Doug Rabson
37 * All rights reserved.
40 #ifndef _RTE_ATOMIC_H_
41 #error "don't include this file directly, please include generic <rte_atomic.h>"
44 #ifndef _RTE_I686_ATOMIC_H_
45 #define _RTE_I686_ATOMIC_H_
50 * Atomic Operations on i686
54 /*------------------------- 64 bit atomic operations -------------------------*/
57 * An atomic compare and set function used by the mutex functions.
58 * (atomic) equivalent to:
 *   if (*dst == exp)
60 * *dst = src (all 64-bit words)
 *
 * @param dst
63 * The destination into which the value will be written.
 * @param exp
 *   The expected value (compared against *dst).
 * @param src
 *   The new value to store on match.
 * @return
69 * Non-zero on success; 0 on failure.
/*
 * NOTE(review): this block is a garbled fragment — the function specifier,
 * body braces, the _exp/_src union declarations and most of both inline-asm
 * statements are missing; only the operand/constraint lists survive.
 * Presumably a cmpxchg8b-based 64-bit CAS (edx:eax = expected, ecx:ebx =
 * new value) — restore from upstream DPDK before use.
 */
72 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
/* First variant: low word of src goes directly in ebx. */
91 : [res] "=a" (res) /* result in eax */
92 : [dst] "S" (dst), /* esi */
93 "b" (_src.l32), /* ebx */
94 "c" (_src.h32), /* ecx */
95 "a" (_exp.l32), /* eax */
96 "d" (_exp.h32) /* edx */
97 : "memory" ); /* no-clobber list */
/*
 * Second variant: src.l32 is staged in edi ("D") and swapped into ebx
 * around the cmpxchg8b ("xchgl %ebx, %edi") — presumably the PIC build,
 * where ebx holds the GOT pointer and must be preserved. The asm opener
 * and the leading mov/setz lines are missing from this fragment.
 */
102 "cmpxchg8b (%[dst]);"
104 "xchgl %%ebx, %%edi;\n"
105 : [res] "=a" (res) /* result in eax */
106 : [dst] "S" (dst), /* esi */
107 "D" (_src.l32), /* ebx */
108 "c" (_src.h32), /* ecx */
109 "a" (_exp.l32), /* eax */
110 "d" (_exp.h32) /* edx */
111 : "memory" ); /* no-clobber list */
118 * The atomic counter structure.
/* NOTE(review): fragment — the "typedef struct {" opener and the closing
 * "} rte_atomic64_t;" line are missing from this extract. */
121 volatile int64_t cnt; /**< Internal counter value. */
125 * Static initializer for an atomic counter.
 *
 * Usage: rte_atomic64_t a = RTE_ATOMIC64_INIT(0);
127 #define RTE_ATOMIC64_INIT(val) { (val) }
130 * Initialize the atomic counter.
 *
 * @param v
133 * A pointer to the atomic counter.
/* NOTE(review): fragment — the "static inline void" specifier, body braces,
 * the tmp/success declarations and the cmpset argument tail are missing.
 * Visible shape: CAS loop, presumably storing 0 into v->cnt. */
136 rte_atomic64_init(rte_atomic64_t *v)
141 while (success == 0) {
143 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
149 * Atomically read a 64-bit counter.
 *
 * @param v
152 * A pointer to the atomic counter.
 * @return
154 * The value of the counter.
156 static inline int64_t
/* NOTE(review): fragment — body braces, the tmp/success declarations, the
 * cmpset argument tail and the return statement are missing. Visible shape:
 * a CAS of the value with itself, which yields an atomic 64-bit read on
 * i686 (no single 64-bit load is atomic there). */
157 rte_atomic64_read(rte_atomic64_t *v)
162 while (success == 0) {
164 /* replace the value by itself */
165 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
172 * Atomically set a 64-bit counter.
 *
 * @param v
175 * A pointer to the atomic counter.
 * @param new_value
177 * The new value of the counter.
/* NOTE(review): fragment — the "static inline void" specifier, body braces,
 * local declarations and the cmpset argument tail are missing. Visible
 * shape: CAS loop replacing the current value with new_value. */
180 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
185 while (success == 0) {
187 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
193 * Atomically add a 64-bit value to a counter.
 *
 * @param v
196 * A pointer to the atomic counter.
 * @param inc
198 * The value to be added to the counter.
/* NOTE(review): fragment — the "static inline void" specifier, body braces,
 * local declarations and the cmpset argument tail are missing. Visible
 * shape: CAS loop, presumably storing tmp + inc. */
201 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
206 while (success == 0) {
208 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
214 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
217 * A pointer to the atomic counter.
 * @param dec
219 * The value to be subtracted from the counter.
/* NOTE(review): fragment — the "static inline void" specifier, body braces,
 * local declarations and the cmpset argument tail are missing. Visible
 * shape: CAS loop, presumably storing tmp - dec. */
222 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
227 while (success == 0) {
229 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
235 * Atomically increment a 64-bit counter by one.
 *
 * @param v
238 * A pointer to the atomic counter.
/* NOTE(review): fragment — return-type line and body braces are missing.
 * Delegates to rte_atomic64_add(v, 1); no result is returned, so the
 * original "and test" wording in the summary was dropped. */
241 rte_atomic64_inc(rte_atomic64_t *v)
243 rte_atomic64_add(v, 1);
247 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
250 * A pointer to the atomic counter.
/* NOTE(review): fragment — return-type line and body braces are missing.
 * Delegates to rte_atomic64_sub(v, 1); no result is returned, so the
 * original "and test" wording in the summary was dropped. */
253 rte_atomic64_dec(rte_atomic64_t *v)
255 rte_atomic64_sub(v, 1);
259 * Add a 64-bit value to an atomic counter and return the result.
 *
261 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
262 * returns the value of v after the addition.
 *
 * @param v
265 * A pointer to the atomic counter.
 * @param inc
267 * The value to be added to the counter.
 * @return
269 * The value of v after the addition.
271 static inline int64_t
/* NOTE(review): fragment — body braces, local declarations, the cmpset
 * argument tail and the return statement are missing. Visible shape:
 * CAS loop, presumably returning tmp + inc on success. */
272 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
277 while (success == 0) {
279 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
287 * Subtract a 64-bit value from an atomic counter and return the result.
 *
289 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
290 * and returns the value of v after the subtraction.
 *
 * @param v
293 * A pointer to the atomic counter.
 * @param dec
295 * The value to be subtracted from the counter.
 * @return
297 * The value of v after the subtraction.
299 static inline int64_t
/* NOTE(review): fragment — body braces, local declarations, the cmpset
 * argument tail and the return statement are missing. Visible shape:
 * CAS loop, presumably returning tmp - dec on success. */
300 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
305 while (success == 0) {
307 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
315 * Atomically increment a 64-bit counter by one and test.
 *
317 * Atomically increments the atomic counter (v) by one and returns
318 * true if the result is 0, or false in all other cases.
 *
 * @param v
321 * A pointer to the atomic counter.
 * @return
323 * True if the result after the addition is 0; false otherwise.
/* NOTE(review): fragment — body braces are missing; the visible body is a
 * one-line delegation to rte_atomic64_add_return. */
325 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
327 return rte_atomic64_add_return(v, 1) == 0;
331 * Atomically decrement a 64-bit counter by one and test.
 *
333 * Atomically decrements the atomic counter (v) by one and returns true if
334 * the result is 0, or false in all other cases.
 *
 * @param v
337 * A pointer to the atomic counter.
 * @return
339 * True if the result after subtraction is 0; false otherwise.
/* NOTE(review): fragment — body braces are missing; the visible body is a
 * one-line delegation to rte_atomic64_sub_return. */
341 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
343 return rte_atomic64_sub_return(v, 1) == 0;
347 * Atomically test and set a 64-bit atomic counter.
 *
349 * If the counter value is already set, return 0 (failed). Otherwise, set
350 * the counter value to 1 and return 1 (success).
 *
 * @param v
353 * A pointer to the atomic counter.
 * @return
355 * 0 if failed; else 1, success.
/* NOTE(review): fragment — body braces are missing; the visible body is a
 * single CAS of 0 -> 1 on v->cnt, so "set" here means "non-zero was
 * already stored" only when the prior value was exactly 0. */
357 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
359 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
363 * Atomically set a 64-bit counter to 0.
 *
 * @param v
366 * A pointer to the atomic counter.
/* NOTE(review): fragment — body braces are missing; the visible body is a
 * one-line delegation to rte_atomic64_set(v, 0). */
368 static inline void rte_atomic64_clear(rte_atomic64_t *v)
370 rte_atomic64_set(v, 0);
373 #endif /* _RTE_I686_ATOMIC_H_ */