/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2013 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
35 #ifndef _RTE_ATOMIC_H_
36 #define _RTE_ATOMIC_H_
42 * This file defines a generic API for atomic
43 * operations. The implementation is architecture-specific.
45 * See lib/librte_eal/common/include/i686/arch/rte_atomic.h
46 * See lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
/*
 * On a uniprocessor build the "lock" instruction prefix is unnecessary
 * overhead; only emit it when more than one lcore may run.
 */
#if RTE_MAX_LCORE == 1
#define MPLOCKED                        /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
#endif
62 * General memory barrier.
64 * Guarantees that the LOAD and STORE operations generated before the
65 * barrier occur before the LOAD and STORE operations generated after.
67 #define rte_mb() asm volatile("mfence;" : : : "memory")
70 * Write memory barrier.
72 * Guarantees that the STORE operations generated before the barrier
73 * occur before the STORE operations generated after.
75 #define rte_wmb() asm volatile("sfence;" : : : "memory")
78 * Read memory barrier.
80 * Guarantees that the LOAD operations generated before the barrier
81 * occur before the LOAD operations generated after.
83 #define rte_rmb() asm volatile("lfence;" : : : "memory")
87 * Atomic Operations on x86_64
90 /*------------------------- 16 bit atomic operations -------------------------*/
93 * Atomic compare and set.
95 * (atomic) equivalent to:
97 * *dst = src (all 16-bit words)
100 * The destination location into which the value will be written.
102 * The expected value.
106 * Non-zero on success; 0 on failure.
109 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
115 "cmpxchgw %[src], %[dst];"
117 : [res] "=a" (res), /* output */
119 : [src] "r" (src), /* input */
122 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial value for the counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
139 * Initialize an atomic counter.
142 * A pointer to the atomic counter.
145 rte_atomic16_init(rte_atomic16_t *v)
151 * Atomically read a 16-bit value from a counter.
154 * A pointer to the atomic counter.
156 * The value of the counter.
158 static inline int16_t
159 rte_atomic16_read(const rte_atomic16_t *v)
165 * Atomically set a counter to a 16-bit value.
168 * A pointer to the atomic counter.
170 * The new value for the counter.
173 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
179 * Atomically add a 16-bit value to an atomic counter.
182 * A pointer to the atomic counter.
184 * The value to be added to the counter.
187 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
189 __sync_fetch_and_add(&v->cnt, inc);
193 * Atomically subtract a 16-bit value from an atomic counter.
196 * A pointer to the atomic counter.
198 * The value to be subtracted from the counter.
201 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
203 __sync_fetch_and_sub(&v->cnt, dec);
207 * Atomically increment a counter by one.
210 * A pointer to the atomic counter.
213 rte_atomic16_inc(rte_atomic16_t *v)
218 : [cnt] "=m" (v->cnt) /* output */
219 : "m" (v->cnt) /* input */
224 * Atomically decrement a counter by one.
227 * A pointer to the atomic counter.
230 rte_atomic16_dec(rte_atomic16_t *v)
235 : [cnt] "=m" (v->cnt) /* output */
236 : "m" (v->cnt) /* input */
241 * Atomically add a 16-bit value to a counter and return the result.
243 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
244 * returns the value of v after addition.
247 * A pointer to the atomic counter.
249 * The value to be added to the counter.
251 * The value of v after the addition.
253 static inline int16_t
254 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
256 return __sync_add_and_fetch(&v->cnt, inc);
260 * Atomically subtract a 16-bit value from a counter and return
263 * Atomically subtracts the 16-bit value (inc) from the atomic counter
264 * (v) and returns the value of v after the subtraction.
267 * A pointer to the atomic counter.
269 * The value to be subtracted from the counter.
271 * The value of v after the subtraction.
273 static inline int16_t
274 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
276 return __sync_sub_and_fetch(&v->cnt, dec);
280 * Atomically increment a 16-bit counter by one and test.
282 * Atomically increments the atomic counter (v) by one and returns true if
283 * the result is 0, or false in all other cases.
286 * A pointer to the atomic counter.
288 * True if the result after the increment operation is 0; false otherwise.
290 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
298 : [cnt] "+m" (v->cnt), /* output */
305 * Atomically decrement a 16-bit counter by one and test.
307 * Atomically decrements the atomic counter (v) by one and returns true if
308 * the result is 0, or false in all other cases.
311 * A pointer to the atomic counter.
313 * True if the result after the decrement operation is 0; false otherwise.
315 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
319 asm volatile(MPLOCKED
322 : [cnt] "+m" (v->cnt), /* output */
329 * Atomically test and set a 16-bit atomic counter.
331 * If the counter value is already set, return 0 (failed). Otherwise, set
332 * the counter value to 1 and return 1 (success).
335 * A pointer to the atomic counter.
337 * 0 if failed; else 1, success.
339 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
341 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
345 * Atomically set a 16-bit counter to 0.
348 * A pointer to the atomic counter.
350 static inline void rte_atomic16_clear(rte_atomic16_t *v)
355 /*------------------------- 32 bit atomic operations -------------------------*/
358 * Atomic compare and set.
360 * (atomic) equivalent to:
362 * *dst = src (all 32-bit words)
365 * The destination location into which the value will be written.
367 * The expected value.
371 * Non-zero on success; 0 on failure.
374 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
380 "cmpxchgl %[src], %[dst];"
382 : [res] "=a" (res), /* output */
384 : [src] "r" (src), /* input */
387 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial value for the counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
404 * Initialize an atomic counter.
407 * A pointer to the atomic counter.
410 rte_atomic32_init(rte_atomic32_t *v)
416 * Atomically read a 32-bit value from a counter.
419 * A pointer to the atomic counter.
421 * The value of the counter.
423 static inline int32_t
424 rte_atomic32_read(const rte_atomic32_t *v)
430 * Atomically set a counter to a 32-bit value.
433 * A pointer to the atomic counter.
435 * The new value for the counter.
438 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
444 * Atomically add a 32-bit value to an atomic counter.
447 * A pointer to the atomic counter.
449 * The value to be added to the counter.
452 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
454 __sync_fetch_and_add(&v->cnt, inc);
458 * Atomically subtract a 32-bit value from an atomic counter.
461 * A pointer to the atomic counter.
463 * The value to be subtracted from the counter.
466 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
468 __sync_fetch_and_sub(&v->cnt, dec);
472 * Atomically increment a counter by one.
475 * A pointer to the atomic counter.
478 rte_atomic32_inc(rte_atomic32_t *v)
483 : [cnt] "=m" (v->cnt) /* output */
484 : "m" (v->cnt) /* input */
489 * Atomically decrement a counter by one.
492 * A pointer to the atomic counter.
495 rte_atomic32_dec(rte_atomic32_t *v)
500 : [cnt] "=m" (v->cnt) /* output */
501 : "m" (v->cnt) /* input */
506 * Atomically add a 32-bit value to a counter and return the result.
508 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
509 * returns the value of v after addition.
512 * A pointer to the atomic counter.
514 * The value to be added to the counter.
516 * The value of v after the addition.
518 static inline int32_t
519 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
521 return __sync_add_and_fetch(&v->cnt, inc);
525 * Atomically subtract a 32-bit value from a counter and return
528 * Atomically subtracts the 32-bit value (inc) from the atomic counter
529 * (v) and returns the value of v after the subtraction.
532 * A pointer to the atomic counter.
534 * The value to be subtracted from the counter.
536 * The value of v after the subtraction.
538 static inline int32_t
539 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
541 return __sync_sub_and_fetch(&v->cnt, dec);
545 * Atomically increment a 32-bit counter by one and test.
547 * Atomically increments the atomic counter (v) by one and returns true if
548 * the result is 0, or false in all other cases.
551 * A pointer to the atomic counter.
553 * True if the result after the increment operation is 0; false otherwise.
555 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
563 : [cnt] "+m" (v->cnt), /* output */
570 * Atomically decrement a 32-bit counter by one and test.
572 * Atomically decrements the atomic counter (v) by one and returns true if
573 * the result is 0, or false in all other cases.
576 * A pointer to the atomic counter.
578 * True if the result after the decrement operation is 0; false otherwise.
580 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
584 asm volatile(MPLOCKED
587 : [cnt] "+m" (v->cnt), /* output */
594 * Atomically test and set a 32-bit atomic counter.
596 * If the counter value is already set, return 0 (failed). Otherwise, set
597 * the counter value to 1 and return 1 (success).
600 * A pointer to the atomic counter.
602 * 0 if failed; else 1, success.
604 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
606 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
610 * Atomically set a 32-bit counter to 0.
613 * A pointer to the atomic counter.
615 static inline void rte_atomic32_clear(rte_atomic32_t *v)
620 /* any other functions are in arch specific files */
621 #include "arch/rte_atomic.h"
626 /*------------------------- 64 bit atomic operations -------------------------*/
629 * An atomic compare and set function used by the mutex functions.
630 * (atomic) equivalent to:
632 * *dst = src (all 64-bit words)
635 * The destination into which the value will be written.
637 * The expected value.
641 * Non-zero on success; 0 on failure.
644 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
647 * The atomic counter structure.
650 volatile int64_t cnt; /**< Internal counter value. */
654 * Static initializer for an atomic counter.
656 #define RTE_ATOMIC64_INIT(val) { (val) }
659 * Initialize the atomic counter.
662 * A pointer to the atomic counter.
665 rte_atomic64_init(rte_atomic64_t *v);
668 * Atomically read a 64-bit counter.
671 * A pointer to the atomic counter.
673 * The value of the counter.
675 static inline int64_t
676 rte_atomic64_read(rte_atomic64_t *v);
679 * Atomically set a 64-bit counter.
682 * A pointer to the atomic counter.
684 * The new value of the counter.
687 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
690 * Atomically add a 64-bit value to a counter.
693 * A pointer to the atomic counter.
695 * The value to be added to the counter.
698 rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
701 * Atomically subtract a 64-bit value from a counter.
704 * A pointer to the atomic counter.
706 * The value to be subtracted from the counter.
709 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
712 * Atomically increment a 64-bit counter by one and test.
715 * A pointer to the atomic counter.
718 rte_atomic64_inc(rte_atomic64_t *v);
721 * Atomically decrement a 64-bit counter by one and test.
724 * A pointer to the atomic counter.
727 rte_atomic64_dec(rte_atomic64_t *v);
730 * Add a 64-bit value to an atomic counter and return the result.
732 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
733 * returns the value of v after the addition.
736 * A pointer to the atomic counter.
738 * The value to be added to the counter.
740 * The value of v after the addition.
742 static inline int64_t
743 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
746 * Subtract a 64-bit value from an atomic counter and return the result.
748 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
749 * and returns the value of v after the subtraction.
752 * A pointer to the atomic counter.
754 * The value to be subtracted from the counter.
756 * The value of v after the subtraction.
758 static inline int64_t
759 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
762 * Atomically increment a 64-bit counter by one and test.
764 * Atomically increments the atomic counter (v) by one and returns
765 * true if the result is 0, or false in all other cases.
768 * A pointer to the atomic counter.
770 * True if the result after the addition is 0; false otherwise.
773 rte_atomic64_inc_and_test(rte_atomic64_t *v);
776 * Atomically decrement a 64-bit counter by one and test.
778 * Atomically decrements the atomic counter (v) by one and returns true if
779 * the result is 0, or false in all other cases.
782 * A pointer to the atomic counter.
784 * True if the result after subtraction is 0; false otherwise.
787 rte_atomic64_dec_and_test(rte_atomic64_t *v);
790 * Atomically test and set a 64-bit atomic counter.
792 * If the counter value is already set, return 0 (failed). Otherwise, set
793 * the counter value to 1 and return 1 (success).
796 * A pointer to the atomic counter.
798 * 0 if failed; else 1, success.
801 rte_atomic64_test_and_set(rte_atomic64_t *v);
804 * Atomically set a 64-bit counter to 0.
807 * A pointer to the atomic counter.
810 rte_atomic64_clear(rte_atomic64_t *v);
812 #endif /* __DOXYGEN__ */
819 #endif /* _RTE_ATOMIC_H_ */