4 * Copyright(c) 2010-2013 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 * Inspired from FreeBSD src/sys/amd64/include/atomic.h
37 * Copyright (c) 1998 Doug Rabson
38 * All rights reserved.
41 #ifndef _RTE_ATOMIC_H_
42 #error "don't include this file directly, please include generic <rte_atomic.h>"
45 #ifndef _RTE_X86_64_ATOMIC_H_
46 #define _RTE_X86_64_ATOMIC_H_
49 /*------------------------- 64 bit atomic operations -------------------------*/
52 * An atomic compare and set function used by the mutex functions.
53 * (atomic) equivalent to:
55 * *dst = src (all 64-bit words)
58 * The destination into which the value will be written.
64 * Non-zero on success; 0 on failure.
/*
 * Atomic 64-bit compare-and-set: writes src into *dst only if *dst == exp.
 * Implemented with CMPXCHGQ; the "=a" constraint is required because
 * CMPXCHG implicitly compares against (and clobbers) RAX. Per the header
 * comment above, returns non-zero on success and 0 on failure.
 * NOTE(review): the function header, lock prefix, asm-template opener and
 * the lines loading exp/res are not visible in this chunk — confirm a
 * LOCK prefix precedes the cmpxchgq in the full source.
 */
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
"cmpxchgq %[src], %[dst];"
: [res] "=a" (res), /* output: success flag, via RAX/ZF */
: [src] "r" (src), /* input: value to store on match */
: "memory"); /* no-clobber list; "memory" orders surrounding accesses */
* The atomic counter structure.
volatile int64_t cnt; /**< Internal counter value; volatile forces a real memory access on each read/write. */
93 * Static initializer for an atomic counter.
95 #define RTE_ATOMIC64_INIT(val) { (val) }
98 * Initialize the atomic counter.
101 * A pointer to the atomic counter.
104 rte_atomic64_init(rte_atomic64_t *v)
110 * Atomically read a 64-bit counter.
113 * A pointer to the atomic counter.
115 * The value of the counter.
/* Atomically read the 64-bit counter. On x86-64 an aligned 64-bit load is
 * naturally atomic; body not visible in this chunk — presumably returns v->cnt. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
124 * Atomically set a 64-bit counter.
127 * A pointer to the atomic counter.
129 * The new value of the counter.
132 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
138 * Atomically add a 64-bit value to a counter.
141 * A pointer to the atomic counter.
143 * The value to be added to the counter.
/*
 * Atomically add inc to the counter (no return value).
 * "ir" lets the compiler pass inc as an immediate or a register.
 * NOTE(review): the asm opener and lock prefix precede the visible lines,
 * and a trailing "m"(v->cnt) dummy input (matching the pattern seen in
 * inc/dec below) follows the comma — confirm in the full source.
 */
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
"addq %[inc], %[cnt]"
: [cnt] "=m" (v->cnt) /* output: counter in memory */
: [inc] "ir" (inc), /* input: addend (immediate or register) */
158 * Atomically subtract a 64-bit value from a counter.
161 * A pointer to the atomic counter.
163 * The value to be subtracted from the counter.
/*
 * Atomically subtract dec from the counter (no return value).
 * Mirror image of rte_atomic64_add, using SUBQ.
 * NOTE(review): asm opener, lock prefix and the trailing dummy "m" input
 * are on lines not visible in this chunk.
 */
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
"subq %[dec], %[cnt]"
: [cnt] "=m" (v->cnt) /* output: counter in memory */
: [dec] "ir" (dec), /* input: subtrahend (immediate or register) */
178 * Atomically increment a 64-bit counter by one and test.
181 * A pointer to the atomic counter.
/*
 * Atomically increment the counter by one.
 * The "m"(v->cnt) dummy input tells GCC the asm also reads the counter,
 * since "=m" alone declares it write-only.
 * NOTE(review): the asm template line (presumably a locked incq) is not
 * visible in this chunk.
 */
rte_atomic64_inc(rte_atomic64_t *v)
: [cnt] "=m" (v->cnt) /* output: counter in memory */
: "m" (v->cnt) /* input: declares the read side of the read-modify-write */
195 * Atomically decrement a 64-bit counter by one and test.
198 * A pointer to the atomic counter.
/*
 * Atomically decrement the counter by one.
 * Same constraint pattern as rte_atomic64_inc: "=m" output plus a dummy
 * "m" input so the compiler knows the memory operand is read and written.
 * NOTE(review): the asm template line (presumably a locked decq) is not
 * visible in this chunk.
 */
rte_atomic64_dec(rte_atomic64_t *v)
: [cnt] "=m" (v->cnt) /* output: counter in memory */
: "m" (v->cnt) /* input: declares the read side of the read-modify-write */
212 * Add a 64-bit value to an atomic counter and return the result.
214 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
215 * returns the value of v after the addition.
218 * A pointer to the atomic counter.
220 * The value to be added to the counter.
222 * The value of v after the addition.
/*
 * Atomically add inc to the counter and return the post-add value.
 * XADDQ exchanges the register with memory and stores the sum, so after
 * the instruction `prev` holds the counter's old value; the full source
 * presumably returns prev + inc (prev is declared on a line not visible
 * in this chunk).
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
"xaddq %[prev], %[cnt]"
: [prev] "+r" (prev), /* output: in=addend, out=old counter value */
: "m" (v->cnt) /* input: declares the read of the memory operand */
240 * Subtract a 64-bit value from an atomic counter and return the result.
242 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
243 * and returns the value of v after the subtraction.
246 * A pointer to the atomic counter.
248 * The value to be subtracted from the counter.
250 * The value of v after the subtraction.
/*
 * Atomically subtract dec from the counter and return the post-subtract
 * value, implemented as an add of the negation.
 * NOTE(review): `-dec` is signed negation; it overflows (undefined
 * behavior) if dec == INT64_MIN. Harmless for realistic counter deltas,
 * but worth confirming callers never pass INT64_MIN.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
return rte_atomic64_add_return(v, -dec);
259 * Atomically increment a 64-bit counter by one and test.
261 * Atomically increments the atomic counter (v) by one and returns
262 * true if the result is 0, or false in all other cases.
265 * A pointer to the atomic counter.
267 * True if the result after the addition is 0; false otherwise.
/*
 * Atomically increment the counter and return true (non-zero) iff the
 * result is 0. "+m" correctly marks the counter as read-and-written.
 * NOTE(review): the asm template (presumably locked incq + sete into a
 * flag variable) and the return statement are not visible in this chunk.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
: [cnt] "+m" (v->cnt), /* output: counter, read-modify-write */
285 * Atomically decrement a 64-bit counter by one and test.
287 * Atomically decrements the atomic counter (v) by one and returns true if
288 * the result is 0, or false in all other cases.
291 * A pointer to the atomic counter.
293 * True if the result after subtraction is 0; false otherwise.
/*
 * Atomically decrement the counter and return true (non-zero) iff the
 * result is 0. Mirror of rte_atomic64_inc_and_test.
 * NOTE(review): the asm template and return statement are not visible in
 * this chunk.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
: [cnt] "+m" (v->cnt), /* output: counter, read-modify-write */
310 * Atomically test and set a 64-bit atomic counter.
312 * If the counter value is already set, return 0 (failed). Otherwise, set
313 * the counter value to 1 and return 1 (success).
316 * A pointer to the atomic counter.
318 * 0 if failed; else 1, success.
/*
 * Atomically transition the counter from 0 to 1.
 * Returns 1 if this call performed the 0->1 transition, 0 if the counter
 * was already non-... set. The cast from volatile int64_t* to
 * volatile uint64_t* is safe: same size and alignment, and cmpset only
 * moves raw 64-bit bits.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
326 * Atomically set a 64-bit counter to 0.
329 * A pointer to the atomic counter.
331 static inline void rte_atomic64_clear(rte_atomic64_t *v)
336 #endif /* _RTE_X86_64_ATOMIC_H_ */