4 * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 * Inspired from FreeBSD src/sys/amd64/include/atomic.h
36 * Copyright (c) 1998 Doug Rabson
37 * All rights reserved.
40 #ifndef _RTE_ATOMIC_H_
41 #error "don't include this file directly, please include generic <rte_atomic.h>"
44 #ifndef _RTE_X86_64_ATOMIC_H_
45 #define _RTE_X86_64_ATOMIC_H_
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        uint8_t res;

        /*
         * lock-prefixed cmpxchgq performs the compare-and-swap atomically
         * across cores; sete captures ZF (1 iff the exchange happened).
         */
        __asm__ volatile(
                        "lock ; "
                        "cmpxchgq %[src], %[dst];"
                        "sete %[res];"
                        : [res] "=a" (res),     /* output */
                          [dst] "=m" (*dst)
                        : [src] "r" (src),      /* input */
                          "a" (exp),
                          "m" (*dst)
                        : "memory");            /* compiler barrier */

        return res;
}
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
97 * Initialize the atomic counter.
100 * A pointer to the atomic counter.
103 rte_atomic64_init(rte_atomic64_t *v)
109 * Atomically read a 64-bit counter.
112 * A pointer to the atomic counter.
114 * The value of the counter.
116 static inline int64_t
117 rte_atomic64_read(rte_atomic64_t *v)
123 * Atomically set a 64-bit counter.
126 * A pointer to the atomic counter.
128 * The new value of the counter.
131 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
137 * Atomically add a 64-bit value to a counter.
140 * A pointer to the atomic counter.
142 * The value to be added to the counter.
145 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
149 "addq %[inc], %[cnt]"
150 : [cnt] "=m" (v->cnt) /* output */
151 : [inc] "ir" (inc), /* input */
157 * Atomically subtract a 64-bit value from a counter.
160 * A pointer to the atomic counter.
162 * The value to be subtracted from the counter.
165 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
169 "subq %[dec], %[cnt]"
170 : [cnt] "=m" (v->cnt) /* output */
171 : [dec] "ir" (dec), /* input */
177 * Atomically increment a 64-bit counter by one and test.
180 * A pointer to the atomic counter.
183 rte_atomic64_inc(rte_atomic64_t *v)
188 : [cnt] "=m" (v->cnt) /* output */
189 : "m" (v->cnt) /* input */
194 * Atomically decrement a 64-bit counter by one and test.
197 * A pointer to the atomic counter.
200 rte_atomic64_dec(rte_atomic64_t *v)
205 : [cnt] "=m" (v->cnt) /* output */
206 : "m" (v->cnt) /* input */
211 * Add a 64-bit value to an atomic counter and return the result.
213 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
214 * returns the value of v after the addition.
217 * A pointer to the atomic counter.
219 * The value to be added to the counter.
221 * The value of v after the addition.
223 static inline int64_t
224 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
230 "xaddq %[prev], %[cnt]"
231 : [prev] "+r" (prev), /* output */
233 : "m" (v->cnt) /* input */
239 * Subtract a 64-bit value from an atomic counter and return the result.
241 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
242 * and returns the value of v after the subtraction.
245 * A pointer to the atomic counter.
247 * The value to be subtracted from the counter.
249 * The value of v after the subtraction.
251 static inline int64_t
252 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
254 return rte_atomic64_add_return(v, -dec);
258 * Atomically increment a 64-bit counter by one and test.
260 * Atomically increments the atomic counter (v) by one and returns
261 * true if the result is 0, or false in all other cases.
264 * A pointer to the atomic counter.
266 * True if the result after the addition is 0; false otherwise.
268 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
276 : [cnt] "+m" (v->cnt), /* output */
284 * Atomically decrement a 64-bit counter by one and test.
286 * Atomically decrements the atomic counter (v) by one and returns true if
287 * the result is 0, or false in all other cases.
290 * A pointer to the atomic counter.
292 * True if the result after subtraction is 0; false otherwise.
294 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
302 : [cnt] "+m" (v->cnt), /* output */
309 * Atomically test and set a 64-bit atomic counter.
311 * If the counter value is already set, return 0 (failed). Otherwise, set
312 * the counter value to 1 and return 1 (success).
315 * A pointer to the atomic counter.
317 * 0 if failed; else 1, success.
319 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
321 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
325 * Atomically set a 64-bit counter to 0.
328 * A pointer to the atomic counter.
330 static inline void rte_atomic64_clear(rte_atomic64_t *v)
335 #endif /* _RTE_X86_64_ATOMIC_H_ */