/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);
/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
/**
 * General memory barrier for I/O device.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device.
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device.
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
#endif /* __DOXYGEN__ */
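/*
 * Illustrative usage sketch (not part of this header's API): the classic
 * message-passing pattern in which rte_smp_wmb() orders the payload store
 * before the "ready" flag store on the producer lcore, and rte_smp_rmb()
 * orders the flag load before the payload load on the consumer lcore.
 * The helper and parameter names below are hypothetical, and the sketch
 * assumes the architecture-specific barrier implementations are available
 * (they are provided by the per-architecture rte_atomic headers).
 */
static inline void
example_publish(volatile uint32_t *payload, volatile uint32_t *ready,
		uint32_t value)
{
	*payload = value;
	rte_smp_wmb();	/* payload store must be visible before the flag store */
	*ready = 1;
}

static inline int
example_poll(const volatile uint32_t *payload, const volatile uint32_t *ready,
	     uint32_t *value)
{
	if (*ready == 0)
		return 0;	/* nothing published yet */
	rte_smp_rmb();		/* flag load must complete before the payload load */
	*value = *payload;
	return 1;
}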
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while (0)
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
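/*
 * Illustrative usage sketch (not part of this header's API): the typical
 * compare-and-set retry loop built on rte_atomic16_cmpset(). The helper
 * name and the saturation bound are hypothetical.
 */
static inline void
example_atomic16_add_capped(volatile uint16_t *counter, uint16_t max)
{
	uint16_t oldval, newval;

	do {
		oldval = *counter;
		if (oldval >= max)
			return;		/* already at the cap, nothing to do */
		newval = oldval + 1;
		/* retry if another lcore changed *counter in the meantime */
	} while (rte_atomic16_cmpset(counter, oldval, newval) == 0);
}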
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif
/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
/**
 * Atomically subtract a 16-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
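/*
 * Illustrative usage sketch (not part of this header's API): a minimal
 * reference counter built on the 16-bit counter operations above. The
 * structure and function names are hypothetical.
 */
struct example_object {
	rte_atomic16_t refcnt;	/* initialized with RTE_ATOMIC16_INIT(1) */
};

static inline void
example_object_get(struct example_object *obj)
{
	rte_atomic16_inc(&obj->refcnt);
}

static inline int
example_object_put(struct example_object *obj)
{
	/* returns 1 exactly once, when the last reference is dropped */
	return rte_atomic16_dec_and_test(&obj->refcnt);
}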
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif
/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
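/*
 * Illustrative usage sketch (not part of this header's API): using
 * rte_atomic32_add_return() to hand out unique, monotonically increasing
 * slot indices to multiple lcores. The counter and helper names are
 * hypothetical.
 */
static rte_atomic32_t example_next_slot = RTE_ATOMIC32_INIT(0);

static inline int32_t
example_alloc_slot(void)
{
	/* add_return() yields the value after the addition, so subtract one
	 * to obtain a zero-based index that no other lcore can also receive. */
	return rte_atomic32_add_return(&example_next_slot, 1) - 1;
}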
/**
 * Atomically subtract a 32-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif
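/*
 * Illustrative usage sketch (not part of this header's API): using
 * rte_atomic32_test_and_set() so that exactly one caller performs a
 * one-time initialization step. The names are hypothetical; note that
 * later callers return immediately and do not wait for init_fn() to
 * finish.
 */
static rte_atomic32_t example_init_done = RTE_ATOMIC32_INIT(0);

static inline void
example_init_once(void (*init_fn)(void))
{
	/* only the first caller sees the 0 -> 1 transition succeed */
	if (rte_atomic32_test_and_set(&example_init_done))
		init_fn();
}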
/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif
/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif
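/*
 * Illustrative sketch (not part of this header's API): the same
 * compare-and-set retry loop used by the 64-bit init/read/set fallbacks
 * above can build any 64-bit read-modify-write operation on targets
 * without a native 64-bit atomic add. The helper name is hypothetical.
 */
static inline void
example_atomic64_add_with_cmpset(rte_atomic64_t *v, int64_t inc)
{
	uint64_t oldval;

	do {
		oldval = (uint64_t)v->cnt;
		/* retry if another lcore updated the counter in between */
	} while (rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
				     oldval, oldval + (uint64_t)inc) == 0);
}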
/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif
/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif
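/*
 * Illustrative usage sketch (not part of this header's API): 64-bit
 * counters are typically used for statistics that several lcores update
 * concurrently. The structure and field names are hypothetical.
 */
struct example_port_stats {
	rte_atomic64_t rx_packets;
	rte_atomic64_t rx_bytes;
};

static inline void
example_stats_update(struct example_port_stats *stats,
		     int64_t packets, int64_t bytes)
{
	rte_atomic64_add(&stats->rx_packets, packets);
	rte_atomic64_add(&stats->rx_bytes, bytes);
}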
/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif
/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif
/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif
/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif
/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif
/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
#endif /* _RTE_ATOMIC_H_ */