4 * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
34 #ifndef _RTE_ATOMIC_H_
35 #define _RTE_ATOMIC_H_
41 * This file defines a generic API for atomic
42 * operations. The implementation is architecture-specific.
44 * See lib/librte_eal/common/include/i686/arch/rte_atomic.h
45 * See lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
54 #if RTE_MAX_LCORE == 1
55 #define MPLOCKED /**< No need to insert MP lock prefix. */
57 #define MPLOCKED "lock ; " /**< Insert MP lock prefix. */
61 * General memory barrier.
63 * Guarantees that the LOAD and STORE operations generated before the
64 * barrier occur before the LOAD and STORE operations generated after.
66 #define rte_mb() _mm_mfence()
69 * Write memory barrier.
71 * Guarantees that the STORE operations generated before the barrier
72 * occur before the STORE operations generated after.
74 #define rte_wmb() _mm_sfence()
77 * Read memory barrier.
79 * Guarantees that the LOAD operations generated before the barrier
80 * occur before the LOAD operations generated after.
82 #define rte_rmb() _mm_lfence()
87 * Guarantees that operation reordering does not occur at compile time
88 * for operations directly before and after the barrier.
90 #define rte_compiler_barrier() do { \
91 asm volatile ("" : : : "memory"); \
94 #include <emmintrin.h>
98 * Atomic Operations on x86_64
101 /*------------------------- 16 bit atomic operations -------------------------*/
104 * Atomic compare and set.
106 * (atomic) equivalent to:
108 * *dst = src (all 16-bit words)
111 * The destination location into which the value will be written.
113 * The expected value.
117 * Non-zero on success; 0 on failure.
120 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
122 #ifndef RTE_FORCE_INTRINSICS
127 "cmpxchgw %[src], %[dst];"
129 : [res] "=a" (res), /* output */
131 : [src] "r" (src), /* input */
134 : "memory"); /* no-clobber list */
137 return __sync_bool_compare_and_swap(dst, exp, src);
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	/* 16-bit aligned loads are atomic on x86; a plain volatile read suffices */
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}
194 * Atomically add a 16-bit value to an atomic counter.
197 * A pointer to the atomic counter.
199 * The value to be added to the counter.
202 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
204 __sync_fetch_and_add(&v->cnt, inc);
208 * Atomically subtract a 16-bit value from an atomic counter.
211 * A pointer to the atomic counter.
213 * The value to be subtracted from the counter.
216 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
218 __sync_fetch_and_sub(&v->cnt, dec);
222 * Atomically increment a counter by one.
225 * A pointer to the atomic counter.
228 rte_atomic16_inc(rte_atomic16_t *v)
230 #ifndef RTE_FORCE_INTRINSICS
234 : [cnt] "=m" (v->cnt) /* output */
235 : "m" (v->cnt) /* input */
238 rte_atomic16_add(v, 1);
243 * Atomically decrement a counter by one.
246 * A pointer to the atomic counter.
249 rte_atomic16_dec(rte_atomic16_t *v)
251 #ifndef RTE_FORCE_INTRINSICS
255 : [cnt] "=m" (v->cnt) /* output */
256 : "m" (v->cnt) /* input */
259 rte_atomic16_sub(v, 1);
264 * Atomically add a 16-bit value to a counter and return the result.
266 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
267 * returns the value of v after addition.
270 * A pointer to the atomic counter.
272 * The value to be added to the counter.
274 * The value of v after the addition.
276 static inline int16_t
277 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
279 return __sync_add_and_fetch(&v->cnt, inc);
283 * Atomically subtract a 16-bit value from a counter and return
286 * Atomically subtracts the 16-bit value (inc) from the atomic counter
287 * (v) and returns the value of v after the subtraction.
290 * A pointer to the atomic counter.
292 * The value to be subtracted from the counter.
294 * The value of v after the subtraction.
296 static inline int16_t
297 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
299 return __sync_sub_and_fetch(&v->cnt, dec);
303 * Atomically increment a 16-bit counter by one and test.
305 * Atomically increments the atomic counter (v) by one and returns true if
306 * the result is 0, or false in all other cases.
309 * A pointer to the atomic counter.
311 * True if the result after the increment operation is 0; false otherwise.
313 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
315 #ifndef RTE_FORCE_INTRINSICS
322 : [cnt] "+m" (v->cnt), /* output */
327 return (__sync_add_and_fetch(&v->cnt, 1) == 0);
332 * Atomically decrement a 16-bit counter by one and test.
334 * Atomically decrements the atomic counter (v) by one and returns true if
335 * the result is 0, or false in all other cases.
338 * A pointer to the atomic counter.
340 * True if the result after the decrement operation is 0; false otherwise.
342 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
344 #ifndef RTE_FORCE_INTRINSICS
347 asm volatile(MPLOCKED
350 : [cnt] "+m" (v->cnt), /* output */
355 return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
360 * Atomically test and set a 16-bit atomic counter.
362 * If the counter value is already set, return 0 (failed). Otherwise, set
363 * the counter value to 1 and return 1 (success).
366 * A pointer to the atomic counter.
368 * 0 if failed; else 1, success.
370 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
372 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
376 * Atomically set a 16-bit counter to 0.
379 * A pointer to the atomic counter.
381 static inline void rte_atomic16_clear(rte_atomic16_t *v)
386 /*------------------------- 32 bit atomic operations -------------------------*/
389 * Atomic compare and set.
391 * (atomic) equivalent to:
393 * *dst = src (all 32-bit words)
396 * The destination location into which the value will be written.
398 * The expected value.
402 * Non-zero on success; 0 on failure.
405 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
407 #ifndef RTE_FORCE_INTRINSICS
412 "cmpxchgl %[src], %[dst];"
414 : [res] "=a" (res), /* output */
416 : [src] "r" (src), /* input */
419 : "memory"); /* no-clobber list */
422 return __sync_bool_compare_and_swap(dst, exp, src);
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	/* 32-bit aligned loads are atomic on x86; a plain volatile read suffices */
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}
479 * Atomically add a 32-bit value to an atomic counter.
482 * A pointer to the atomic counter.
484 * The value to be added to the counter.
487 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
489 __sync_fetch_and_add(&v->cnt, inc);
493 * Atomically subtract a 32-bit value from an atomic counter.
496 * A pointer to the atomic counter.
498 * The value to be subtracted from the counter.
501 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
503 __sync_fetch_and_sub(&v->cnt, dec);
507 * Atomically increment a counter by one.
510 * A pointer to the atomic counter.
513 rte_atomic32_inc(rte_atomic32_t *v)
515 #ifndef RTE_FORCE_INTRINSICS
519 : [cnt] "=m" (v->cnt) /* output */
520 : "m" (v->cnt) /* input */
523 rte_atomic32_add(v, 1);
528 * Atomically decrement a counter by one.
531 * A pointer to the atomic counter.
534 rte_atomic32_dec(rte_atomic32_t *v)
536 #ifndef RTE_FORCE_INTRINSICS
540 : [cnt] "=m" (v->cnt) /* output */
541 : "m" (v->cnt) /* input */
544 rte_atomic32_sub(v,1);
549 * Atomically add a 32-bit value to a counter and return the result.
551 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
552 * returns the value of v after addition.
555 * A pointer to the atomic counter.
557 * The value to be added to the counter.
559 * The value of v after the addition.
561 static inline int32_t
562 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
564 return __sync_add_and_fetch(&v->cnt, inc);
568 * Atomically subtract a 32-bit value from a counter and return
571 * Atomically subtracts the 32-bit value (inc) from the atomic counter
572 * (v) and returns the value of v after the subtraction.
575 * A pointer to the atomic counter.
577 * The value to be subtracted from the counter.
579 * The value of v after the subtraction.
581 static inline int32_t
582 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
584 return __sync_sub_and_fetch(&v->cnt, dec);
588 * Atomically increment a 32-bit counter by one and test.
590 * Atomically increments the atomic counter (v) by one and returns true if
591 * the result is 0, or false in all other cases.
594 * A pointer to the atomic counter.
596 * True if the result after the increment operation is 0; false otherwise.
598 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
600 #ifndef RTE_FORCE_INTRINSICS
607 : [cnt] "+m" (v->cnt), /* output */
612 return (__sync_add_and_fetch(&v->cnt, 1) == 0);
617 * Atomically decrement a 32-bit counter by one and test.
619 * Atomically decrements the atomic counter (v) by one and returns true if
620 * the result is 0, or false in all other cases.
623 * A pointer to the atomic counter.
625 * True if the result after the decrement operation is 0; false otherwise.
627 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
629 #ifndef RTE_FORCE_INTRINSICS
632 asm volatile(MPLOCKED
635 : [cnt] "+m" (v->cnt), /* output */
640 return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
645 * Atomically test and set a 32-bit atomic counter.
647 * If the counter value is already set, return 0 (failed). Otherwise, set
648 * the counter value to 1 and return 1 (success).
651 * A pointer to the atomic counter.
653 * 0 if failed; else 1, success.
655 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
657 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
661 * Atomically set a 32-bit counter to 0.
664 * A pointer to the atomic counter.
666 static inline void rte_atomic32_clear(rte_atomic32_t *v)
671 #ifndef RTE_FORCE_INTRINSICS
672 /* any other functions are in arch specific files */
673 #include "arch/rte_atomic.h"
678 /*------------------------- 64 bit atomic operations -------------------------*/
681 * An atomic compare and set function used by the mutex functions.
682 * (atomic) equivalent to:
684 * *dst = src (all 64-bit words)
687 * The destination into which the value will be written.
689 * The expected value.
693 * Non-zero on success; 0 on failure.
696 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
699 * The atomic counter structure.
702 volatile int64_t cnt; /**< Internal counter value. */
706 * Static initializer for an atomic counter.
708 #define RTE_ATOMIC64_INIT(val) { (val) }
711 * Initialize the atomic counter.
714 * A pointer to the atomic counter.
717 rte_atomic64_init(rte_atomic64_t *v);
720 * Atomically read a 64-bit counter.
723 * A pointer to the atomic counter.
725 * The value of the counter.
727 static inline int64_t
728 rte_atomic64_read(rte_atomic64_t *v);
731 * Atomically set a 64-bit counter.
734 * A pointer to the atomic counter.
736 * The new value of the counter.
739 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
742 * Atomically add a 64-bit value to a counter.
745 * A pointer to the atomic counter.
747 * The value to be added to the counter.
750 rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
753 * Atomically subtract a 64-bit value from a counter.
756 * A pointer to the atomic counter.
758 * The value to be subtracted from the counter.
761 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
764 * Atomically increment a 64-bit counter by one and test.
767 * A pointer to the atomic counter.
770 rte_atomic64_inc(rte_atomic64_t *v);
773 * Atomically decrement a 64-bit counter by one and test.
776 * A pointer to the atomic counter.
779 rte_atomic64_dec(rte_atomic64_t *v);
782 * Add a 64-bit value to an atomic counter and return the result.
784 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
785 * returns the value of v after the addition.
788 * A pointer to the atomic counter.
790 * The value to be added to the counter.
792 * The value of v after the addition.
794 static inline int64_t
795 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
798 * Subtract a 64-bit value from an atomic counter and return the result.
800 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
801 * and returns the value of v after the subtraction.
804 * A pointer to the atomic counter.
806 * The value to be subtracted from the counter.
808 * The value of v after the subtraction.
810 static inline int64_t
811 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
814 * Atomically increment a 64-bit counter by one and test.
816 * Atomically increments the atomic counter (v) by one and returns
817 * true if the result is 0, or false in all other cases.
820 * A pointer to the atomic counter.
822 * True if the result after the addition is 0; false otherwise.
825 rte_atomic64_inc_and_test(rte_atomic64_t *v);
828 * Atomically decrement a 64-bit counter by one and test.
830 * Atomically decrements the atomic counter (v) by one and returns true if
831 * the result is 0, or false in all other cases.
834 * A pointer to the atomic counter.
836 * True if the result after subtraction is 0; false otherwise.
839 rte_atomic64_dec_and_test(rte_atomic64_t *v);
842 * Atomically test and set a 64-bit atomic counter.
844 * If the counter value is already set, return 0 (failed). Otherwise, set
845 * the counter value to 1 and return 1 (success).
848 * A pointer to the atomic counter.
850 * 0 if failed; else 1, success.
853 rte_atomic64_test_and_set(rte_atomic64_t *v);
856 * Atomically set a 64-bit counter to 0.
859 * A pointer to the atomic counter.
862 rte_atomic64_clear(rte_atomic64_t *v);
864 #endif /* __DOXYGEN__ */
866 #else /*RTE_FORCE_INTRINSICS */
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
904 * Initialize the atomic counter.
907 * A pointer to the atomic counter.
910 rte_atomic64_init(rte_atomic64_t *v)
918 while (success == 0) {
920 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
927 * Atomically read a 64-bit counter.
930 * A pointer to the atomic counter.
932 * The value of the counter.
934 static inline int64_t
935 rte_atomic64_read(rte_atomic64_t *v)
943 while (success == 0) {
945 /* replace the value by itself */
946 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
954 * Atomically set a 64-bit counter.
957 * A pointer to the atomic counter.
959 * The new value of the counter.
962 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
970 while (success == 0) {
972 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
979 * Atomically add a 64-bit value to a counter.
982 * A pointer to the atomic counter.
984 * The value to be added to the counter.
987 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
989 __sync_fetch_and_add(&v->cnt, inc);
993 * Atomically subtract a 64-bit value from a counter.
996 * A pointer to the atomic counter.
998 * The value to be subtracted from the counter.
1001 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
1003 __sync_fetch_and_sub(&v->cnt, dec);
1007 * Atomically increment a 64-bit counter by one and test.
1010 * A pointer to the atomic counter.
1013 rte_atomic64_inc(rte_atomic64_t *v)
1015 rte_atomic64_add(v, 1);
1019 * Atomically decrement a 64-bit counter by one and test.
1022 * A pointer to the atomic counter.
1025 rte_atomic64_dec(rte_atomic64_t *v)
1027 rte_atomic64_sub(v, 1);
1031 * Add a 64-bit value to an atomic counter and return the result.
1033 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
1034 * returns the value of v after the addition.
1037 * A pointer to the atomic counter.
1039 * The value to be added to the counter.
1041 * The value of v after the addition.
1043 static inline int64_t
1044 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
1046 return __sync_add_and_fetch(&v->cnt, inc);
1050 * Subtract a 64-bit value from an atomic counter and return the result.
1052 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
1053 * and returns the value of v after the subtraction.
1056 * A pointer to the atomic counter.
1058 * The value to be subtracted from the counter.
1060 * The value of v after the subtraction.
1062 static inline int64_t
1063 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
1065 return __sync_sub_and_fetch(&v->cnt, dec);
1069 * Atomically increment a 64-bit counter by one and test.
1071 * Atomically increments the atomic counter (v) by one and returns
1072 * true if the result is 0, or false in all other cases.
1075 * A pointer to the atomic counter.
1077 * True if the result after the addition is 0; false otherwise.
1079 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
1081 return rte_atomic64_add_return(v, 1) == 0;
1085 * Atomically decrement a 64-bit counter by one and test.
1087 * Atomically decrements the atomic counter (v) by one and returns true if
1088 * the result is 0, or false in all other cases.
1091 * A pointer to the atomic counter.
1093 * True if the result after subtraction is 0; false otherwise.
1095 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
1097 return rte_atomic64_sub_return(v, 1) == 0;
1101 * Atomically test and set a 64-bit atomic counter.
1103 * If the counter value is already set, return 0 (failed). Otherwise, set
1104 * the counter value to 1 and return 1 (success).
1107 * A pointer to the atomic counter.
1109 * 0 if failed; else 1, success.
1111 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
1113 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
1117 * Atomically set a 64-bit counter to 0.
1120 * A pointer to the atomic counter.
1122 static inline void rte_atomic64_clear(rte_atomic64_t *v)
1124 rte_atomic64_set(v, 0);
1127 #endif /*RTE_FORCE_INTRINSICS */
1133 #endif /* _RTE_ATOMIC_H_ */