/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>
#ifdef __DOXYGEN__

/** @name Memory Barrier
 */

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
/** @name SMP Memory Barrier
 */

/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
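/*
 * Illustrative usage sketch (not part of the API): a minimal
 * single-producer/single-consumer handoff built on the SMP barriers above.
 * The names "data", "ready" and use() are hypothetical.
 *
 *	static uint32_t data;
 *	static volatile uint32_t ready;
 *
 *	// producer: publish the payload, then signal readiness
 *	data = 42;
 *	rte_smp_wmb();	// the data store is visible before the flag store
 *	ready = 1;
 *
 *	// consumer: observe the flag, then read the payload
 *	while (ready == 0)
 *		;
 *	rte_smp_rmb();	// the flag load completes before the data load
 *	use(data);
 */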
/** @name I/O Memory Barrier
 */

/**
 * General memory barrier for I/O device.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device.
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device.
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
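/*
 * Illustrative usage sketch (not part of the API): a common driver pattern
 * where a DMA descriptor is filled in before ringing the device doorbell.
 * "desc", "doorbell", buf_iova, buf_len and tail_idx are hypothetical.
 *
 *	desc->addr = buf_iova;
 *	desc->len = buf_len;
 *	rte_io_wmb();		// descriptor stores reach the device first
 *	*doorbell = tail_idx;	// then notify the device
 */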
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(int memorder);
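/*
 * Illustrative usage sketch (not part of the API): the memorder argument
 * takes one of the compiler's __ATOMIC_* memory-order constants, e.g.:
 *
 *	rte_atomic_thread_fence(__ATOMIC_ACQUIRE);	// acquire fence
 *	rte_atomic_thread_fence(__ATOMIC_RELEASE);	// release fence
 */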
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
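/*
 * Illustrative usage sketch (not part of the API): a lock-free saturating
 * increment built on compare-and-set. "counter" is hypothetical.
 *
 *	static volatile uint16_t counter;
 *
 *	uint16_t cur, next;
 *	do {
 *		cur = counter;
 *		next = (cur == UINT16_MAX) ? cur : (uint16_t)(cur + 1);
 *	} while (!rte_atomic16_cmpset(&counter, cur, next));
 */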
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val
 *   return ret
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif
/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
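/*
 * Illustrative usage sketch (not part of the API; rte_spinlock_t is DPDK's
 * real lock primitive): test-and-set guarding a one-shot initialization.
 * "init_done" and do_init() are hypothetical.
 *
 *	static rte_atomic16_t init_done = RTE_ATOMIC16_INIT(0);
 *
 *	if (rte_atomic16_test_and_set(&init_done))
 *		do_init();	// only the first caller gets here
 */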
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val
 *   return ret
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
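/*
 * Illustrative usage sketch (not part of the API): atomically claiming a
 * pending-event word by swapping in zero. "pending" and handle() are
 * hypothetical.
 *
 *	static volatile uint32_t pending;
 *
 *	uint32_t events = rte_atomic32_exchange(&pending, 0);
 *	if (events != 0)
 *		handle(events);	// this thread owns the bits it swapped out
 */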
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif
/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
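/*
 * Illustrative usage sketch (not part of the API): a reference count where
 * the last releaser frees the object. "obj" and its refcnt field are
 * hypothetical.
 *
 *	rte_atomic32_inc(&obj->refcnt);			// take a reference
 *	...
 *	if (rte_atomic32_dec_and_test(&obj->refcnt))	// drop a reference
 *		free(obj);				// last one frees
 */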
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val
 *   return ret
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif
/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif
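/*
 * Note: on 32-bit targets a plain 64-bit load or store is not guaranteed to
 * be single-copy atomic, which is why the fallback paths in init/read/set
 * above spin on rte_atomic64_cmpset() instead of accessing v->cnt directly.
 */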
/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif
/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif
/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif
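/*
 * Illustrative usage sketch (not part of the API): a statistics counter
 * shared by several lcores. "total_pkts" and nb_rx are hypothetical.
 *
 *	static rte_atomic64_t total_pkts = RTE_ATOMIC64_INIT(0);
 *
 *	rte_atomic64_add(&total_pkts, nb_rx);		// per-burst update
 *	int64_t seen = rte_atomic64_read(&total_pkts);	// atomic snapshot
 */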
/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif
/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
typedef struct {
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;
#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
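/*
 * Illustrative usage sketch (not part of the API): a retry loop updating a
 * 16-byte slot. On failure the primitive refreshes "expected" with the value
 * actually found, so the loop re-computes and retries. "slot" is hypothetical.
 *
 *	static rte_int128_t slot;
 *
 *	rte_int128_t expected = slot;	// racy snapshot; fixed up on failure
 *	rte_int128_t desired;
 *	do {
 *		desired.val[0] = expected.val[0] + 1;
 *		desired.val[1] = expected.val[1];
 *	} while (!rte_atomic128_cmp_exchange(&slot, &expected, &desired,
 *					     0, __ATOMIC_ACQ_REL,
 *					     __ATOMIC_RELAXED));
 */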
#endif /* _RTE_ATOMIC_H_ */