1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
12 * This file defines a generic API for atomic operations.
16 #include <rte_common.h>
20 /** @name Memory Barrier
24 * General memory barrier.
26 * Guarantees that the LOAD and STORE operations generated before the
27 * barrier occur before the LOAD and STORE operations generated after.
28 * This function is architecture dependent.
30 static inline void rte_mb(void);
33 * Write memory barrier.
35 * Guarantees that the STORE operations generated before the barrier
36 * occur before the STORE operations generated after.
37 * This function is architecture dependent.
39 static inline void rte_wmb(void);
42 * Read memory barrier.
44 * Guarantees that the LOAD operations generated before the barrier
45 * occur before the LOAD operations generated after.
46 * This function is architecture dependent.
48 static inline void rte_rmb(void);
51 /** @name SMP Memory Barrier
55 * General memory barrier between lcores
57 * Guarantees that the LOAD and STORE operations that precede the
58 * rte_smp_mb() call are globally visible across the lcores
59 * before the LOAD and STORE operations that follows it.
61 static inline void rte_smp_mb(void);
64 * Write memory barrier between lcores
66 * Guarantees that the STORE operations that precede the
67 * rte_smp_wmb() call are globally visible across the lcores
68 * before the STORE operations that follows it.
70 static inline void rte_smp_wmb(void);
73 * Read memory barrier between lcores
75 * Guarantees that the LOAD operations that precede the
76 * rte_smp_rmb() call are globally visible across the lcores
77 * before the LOAD operations that follows it.
79 static inline void rte_smp_rmb(void);
82 /** @name I/O Memory Barrier
86 * General memory barrier for I/O device
88 * Guarantees that the LOAD and STORE operations that precede the
89 * rte_io_mb() call are visible to I/O device or CPU before the
90 * LOAD and STORE operations that follow it.
92 static inline void rte_io_mb(void);
95 * Write memory barrier for I/O device
97 * Guarantees that the STORE operations that precede the
98 * rte_io_wmb() call are visible to I/O device before the STORE
99 * operations that follow it.
101 static inline void rte_io_wmb(void);
104 * Read memory barrier for IO device
106 * Guarantees that the LOAD operations on I/O device that precede the
107 * rte_io_rmb() call are visible to CPU before the LOAD
108 * operations that follow it.
110 static inline void rte_io_rmb(void);
113 #endif /* __DOXYGEN__ */
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 *
 * Note: this only constrains the compiler; it emits no CPU fence
 * instruction. Use the rte_*mb() family for runtime ordering.
 */
#define rte_compiler_barrier() do {		\
	__asm__ volatile ("" : : : "memory");	\
} while(0)
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic; loop on
	 * compare-and-set until the update lands. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	/* On 32-bit targets a plain 64-bit load is not atomic. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
956 #endif /* _RTE_ATOMIC_H_ */