1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
12 * This file defines a generic API for atomic operations.
16 #include <rte_common.h>
21 * General memory barrier.
23 * Guarantees that the LOAD and STORE operations generated before the
24 * barrier occur before the LOAD and STORE operations generated after.
25 * This function is architecture dependent.
27 static inline void rte_mb(void);
30 * Write memory barrier.
32 * Guarantees that the STORE operations generated before the barrier
33 * occur before the STORE operations generated after.
34 * This function is architecture dependent.
36 static inline void rte_wmb(void);
39 * Read memory barrier.
41 * Guarantees that the LOAD operations generated before the barrier
42 * occur before the LOAD operations generated after.
43 * This function is architecture dependent.
45 static inline void rte_rmb(void);
48 * General memory barrier between lcores
50 * Guarantees that the LOAD and STORE operations that precede the
51 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
54 static inline void rte_smp_mb(void);
57 * Write memory barrier between lcores
59 * Guarantees that the STORE operations that precede the
60 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
63 static inline void rte_smp_wmb(void);
66 * Read memory barrier between lcores
68 * Guarantees that the LOAD operations that precede the
69 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
72 static inline void rte_smp_rmb(void);
75 * General memory barrier for I/O device
77 * Guarantees that the LOAD and STORE operations that precede the
78 * rte_io_mb() call are visible to I/O device or CPU before the
79 * LOAD and STORE operations that follow it.
81 static inline void rte_io_mb(void);
84 * Write memory barrier for I/O device
86 * Guarantees that the STORE operations that precede the
87 * rte_io_wmb() call are visible to I/O device before the STORE
88 * operations that follow it.
90 static inline void rte_io_wmb(void);
93 * Read memory barrier for IO device
95 * Guarantees that the LOAD operations on I/O device that precede the
96 * rte_io_rmb() call are visible to CPU before the LOAD
97 * operations that follow it.
99 static inline void rte_io_rmb(void);
101 #endif /* __DOXYGEN__ */
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	__asm__ volatile ("" : : : "memory");	\
} while (0)
113 /*------------------------- 16 bit atomic operations -------------------------*/
/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
/**
 * Initialize an atomic counter to zero.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}
/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}
/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}
/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
223 * Atomically increment a counter by one.
226 * A pointer to the atomic counter.
229 rte_atomic16_inc(rte_atomic16_t *v);
231 #ifdef RTE_FORCE_INTRINSICS
233 rte_atomic16_inc(rte_atomic16_t *v)
235 rte_atomic16_add(v, 1);
240 * Atomically decrement a counter by one.
243 * A pointer to the atomic counter.
246 rte_atomic16_dec(rte_atomic16_t *v);
248 #ifdef RTE_FORCE_INTRINSICS
250 rte_atomic16_dec(rte_atomic16_t *v)
252 rte_atomic16_sub(v, 1);
/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif
/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
336 * Atomically test and set a 16-bit atomic counter.
338 * If the counter value is already set, return 0 (failed). Otherwise, set
339 * the counter value to 1 and return 1 (success).
342 * A pointer to the atomic counter.
344 * 0 if failed; else 1, success.
346 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);
348 #ifdef RTE_FORCE_INTRINSICS
349 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
351 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
366 /*------------------------- 32 bit atomic operations -------------------------*/
/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
/**
 * Initialize an atomic counter to zero.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}
/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}
/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
476 * Atomically increment a counter by one.
479 * A pointer to the atomic counter.
482 rte_atomic32_inc(rte_atomic32_t *v);
484 #ifdef RTE_FORCE_INTRINSICS
486 rte_atomic32_inc(rte_atomic32_t *v)
488 rte_atomic32_add(v, 1);
493 * Atomically decrement a counter by one.
496 * A pointer to the atomic counter.
499 rte_atomic32_dec(rte_atomic32_t *v);
501 #ifdef RTE_FORCE_INTRINSICS
503 rte_atomic32_dec(rte_atomic32_t *v)
505 rte_atomic32_sub(v,1);
/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif
/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif
589 * Atomically test and set a 32-bit atomic counter.
591 * If the counter value is already set, return 0 (failed). Otherwise, set
592 * the counter value to 1 and return 1 (success).
595 * A pointer to the atomic counter.
597 * 0 if failed; else 1, success.
599 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);
601 #ifdef RTE_FORCE_INTRINSICS
602 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
604 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
619 /*------------------------- 64 bit atomic operations -------------------------*/
/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
/**
 * Initialize the atomic counter to zero.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef RTE_ARCH_64
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic;
	 * loop on compare-and-set until the write takes effect. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef RTE_ARCH_64
	return v->cnt;
#else
	/* On 32-bit targets a plain 64-bit load is not atomic;
	 * use compare-and-set to obtain a consistent snapshot. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif
/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef RTE_ARCH_64
	v->cnt = new_value;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic;
	 * loop on compare-and-set until the write takes effect. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif
/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif
/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif
788 * Atomically increment a 64-bit counter by one and test.
791 * A pointer to the atomic counter.
794 rte_atomic64_inc(rte_atomic64_t *v);
796 #ifdef RTE_FORCE_INTRINSICS
798 rte_atomic64_inc(rte_atomic64_t *v)
800 rte_atomic64_add(v, 1);
805 * Atomically decrement a 64-bit counter by one and test.
808 * A pointer to the atomic counter.
811 rte_atomic64_dec(rte_atomic64_t *v);
813 #ifdef RTE_FORCE_INTRINSICS
815 rte_atomic64_dec(rte_atomic64_t *v)
817 rte_atomic64_sub(v, 1);
/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif
/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif
870 * Atomically increment a 64-bit counter by one and test.
872 * Atomically increments the atomic counter (v) by one and returns
873 * true if the result is 0, or false in all other cases.
876 * A pointer to the atomic counter.
878 * True if the result after the addition is 0; false otherwise.
880 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);
882 #ifdef RTE_FORCE_INTRINSICS
883 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
885 return rte_atomic64_add_return(v, 1) == 0;
890 * Atomically decrement a 64-bit counter by one and test.
892 * Atomically decrements the atomic counter (v) by one and returns true if
893 * the result is 0, or false in all other cases.
896 * A pointer to the atomic counter.
898 * True if the result after subtraction is 0; false otherwise.
900 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);
902 #ifdef RTE_FORCE_INTRINSICS
903 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
905 return rte_atomic64_sub_return(v, 1) == 0;
910 * Atomically test and set a 64-bit atomic counter.
912 * If the counter value is already set, return 0 (failed). Otherwise, set
913 * the counter value to 1 and return 1 (success).
916 * A pointer to the atomic counter.
918 * 0 if failed; else 1, success.
920 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);
922 #ifdef RTE_FORCE_INTRINSICS
923 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
925 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
930 * Atomically set a 64-bit counter to 0.
933 * A pointer to the atomic counter.
935 static inline void rte_atomic64_clear(rte_atomic64_t *v);
937 #ifdef RTE_FORCE_INTRINSICS
938 static inline void rte_atomic64_clear(rte_atomic64_t *v)
940 rte_atomic64_set(v, 0);
944 #endif /* _RTE_ATOMIC_H_ */