4 * Copyright(c) 2010-2013 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35 #ifndef _RTE_ATOMIC_H_
36 #define _RTE_ATOMIC_H_
42 * This file defines a generic API for atomic
43 * operations. The implementation is architecture-specific.
45 * See lib/librte_eal/common/include/i686/arch/rte_atomic.h
46 * See lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
55 #if RTE_MAX_LCORE == 1
56 #define MPLOCKED /**< No need to insert MP lock prefix. */
58 #define MPLOCKED "lock ; " /**< Insert MP lock prefix. */
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
#define rte_mb() _mm_mfence()

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
#define rte_wmb() _mm_sfence()

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
#define rte_rmb() _mm_lfence()

/* SSE2 intrinsics: _mm_mfence/_mm_sfence/_mm_lfence used by the macros above. */
#include <emmintrin.h>
/**
 * @file
 * Atomic Operations on x86_64
 */

/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t res;

	/* cmpxchgw compares %ax (exp) with *dst; sete captures ZF (success). */
	asm volatile(
			MPLOCKED
			"cmpxchgw %[src], %[dst];"
			"sete %[res];"
			: [res] "=a" (res),     /* output */
			  [dst] "=m" (*dst)
			: [src] "r" (src),      /* input */
			  "a" (exp),
			  "m" (*dst)
			: "memory");            /* no-clobber list */
	return res;
#else
	return __sync_bool_compare_and_swap(dst, exp, src);
#endif
}

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	/* 16-bit aligned loads are atomic on x86; volatile forbids caching. */
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	/* 16-bit aligned stores are atomic on x86. */
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	/* locked incw is cheaper than a full cmpxchg loop for +1 */
	asm volatile(
			MPLOCKED
			"incw %[cnt]"
			: [cnt] "=m" (v->cnt)   /* output */
			: "m" (v->cnt)          /* input */
			);
#else
	rte_atomic16_add(v, 1);
#endif
}

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	asm volatile(
			MPLOCKED
			"decw %[cnt]"
			: [cnt] "=m" (v->cnt)   /* output */
			: "m" (v->cnt)          /* input */
			);
#else
	rte_atomic16_sub(v, 1);
#endif
}

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t ret;

	/* sete captures ZF set by the locked incw (result became zero). */
	asm volatile(
			MPLOCKED
			"incw %[cnt] ; "
			"sete %[ret]"
			: [cnt] "+m" (v->cnt),  /* output */
			  [ret] "=qm" (ret)
			);
	return (ret != 0);
#else
	return (__sync_add_and_fetch(&v->cnt, 1) == 0);
#endif
}

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t ret;

	asm volatile(MPLOCKED
			"decw %[cnt] ; "
			"sete %[ret]"
			: [cnt] "+m" (v->cnt),  /* output */
			  [ret] "=qm" (ret)
			);
	return (ret != 0);
#else
	return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
#endif
}

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t res;

	/* cmpxchgl compares %eax (exp) with *dst; sete captures ZF (success). */
	asm volatile(
			MPLOCKED
			"cmpxchgl %[src], %[dst];"
			"sete %[res];"
			: [res] "=a" (res),     /* output */
			  [dst] "=m" (*dst)
			: [src] "r" (src),      /* input */
			  "a" (exp),
			  "m" (*dst)
			: "memory");            /* no-clobber list */
	return res;
#else
	return __sync_bool_compare_and_swap(dst, exp, src);
#endif
}

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	/* 32-bit aligned loads are atomic on x86; volatile forbids caching. */
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	/* 32-bit aligned stores are atomic on x86. */
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	/* locked incl is cheaper than a full cmpxchg loop for +1 */
	asm volatile(
			MPLOCKED
			"incl %[cnt]"
			: [cnt] "=m" (v->cnt)   /* output */
			: "m" (v->cnt)          /* input */
			);
#else
	rte_atomic32_add(v, 1);
#endif
}

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	asm volatile(
			MPLOCKED
			"decl %[cnt]"
			: [cnt] "=m" (v->cnt)   /* output */
			: "m" (v->cnt)          /* input */
			);
#else
	rte_atomic32_sub(v, 1);
#endif
}

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t ret;

	/* sete captures ZF set by the locked incl (result became zero). */
	asm volatile(
			MPLOCKED
			"incl %[cnt] ; "
			"sete %[ret]"
			: [cnt] "+m" (v->cnt),  /* output */
			  [ret] "=qm" (ret)
			);
	return (ret != 0);
#else
	return (__sync_add_and_fetch(&v->cnt, 1) == 0);
#endif
}

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
#ifndef RTE_FORCE_INTRINSICS
	uint8_t ret;

	asm volatile(MPLOCKED
			"decl %[cnt] ; "
			"sete %[ret]"
			: [cnt] "+m" (v->cnt),  /* output */
			  [ret] "=qm" (ret)
			);
	return (ret != 0);
#else
	return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
#endif
}

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
662 #ifndef RTE_FORCE_INTRINSICS
663 /* any other functions are in arch specific files */
664 #include "arch/rte_atomic.h"
669 /*------------------------- 64 bit atomic operations -------------------------*/
672 * An atomic compare and set function used by the mutex functions.
673 * (atomic) equivalent to:
675 * *dst = src (all 64-bit words)
678 * The destination into which the value will be written.
680 * The expected value.
684 * Non-zero on success; 0 on failure.
687 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
690 * The atomic counter structure.
693 volatile int64_t cnt; /**< Internal counter value. */
697 * Static initializer for an atomic counter.
699 #define RTE_ATOMIC64_INIT(val) { (val) }
702 * Initialize the atomic counter.
705 * A pointer to the atomic counter.
708 rte_atomic64_init(rte_atomic64_t *v);
711 * Atomically read a 64-bit counter.
714 * A pointer to the atomic counter.
716 * The value of the counter.
718 static inline int64_t
719 rte_atomic64_read(rte_atomic64_t *v);
722 * Atomically set a 64-bit counter.
725 * A pointer to the atomic counter.
727 * The new value of the counter.
730 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
733 * Atomically add a 64-bit value to a counter.
736 * A pointer to the atomic counter.
738 * The value to be added to the counter.
741 rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
744 * Atomically subtract a 64-bit value from a counter.
747 * A pointer to the atomic counter.
749 * The value to be subtracted from the counter.
752 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
755 * Atomically increment a 64-bit counter by one and test.
758 * A pointer to the atomic counter.
761 rte_atomic64_inc(rte_atomic64_t *v);
764 * Atomically decrement a 64-bit counter by one and test.
767 * A pointer to the atomic counter.
770 rte_atomic64_dec(rte_atomic64_t *v);
773 * Add a 64-bit value to an atomic counter and return the result.
775 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
776 * returns the value of v after the addition.
779 * A pointer to the atomic counter.
781 * The value to be added to the counter.
783 * The value of v after the addition.
785 static inline int64_t
786 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
789 * Subtract a 64-bit value from an atomic counter and return the result.
791 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
792 * and returns the value of v after the subtraction.
795 * A pointer to the atomic counter.
797 * The value to be subtracted from the counter.
799 * The value of v after the subtraction.
801 static inline int64_t
802 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
805 * Atomically increment a 64-bit counter by one and test.
807 * Atomically increments the atomic counter (v) by one and returns
808 * true if the result is 0, or false in all other cases.
811 * A pointer to the atomic counter.
813 * True if the result after the addition is 0; false otherwise.
816 rte_atomic64_inc_and_test(rte_atomic64_t *v);
819 * Atomically decrement a 64-bit counter by one and test.
821 * Atomically decrements the atomic counter (v) by one and returns true if
822 * the result is 0, or false in all other cases.
825 * A pointer to the atomic counter.
827 * True if the result after subtraction is 0; false otherwise.
830 rte_atomic64_dec_and_test(rte_atomic64_t *v);
833 * Atomically test and set a 64-bit atomic counter.
835 * If the counter value is already set, return 0 (failed). Otherwise, set
836 * the counter value to 1 and return 1 (success).
839 * A pointer to the atomic counter.
841 * 0 if failed; else 1, success.
844 rte_atomic64_test_and_set(rte_atomic64_t *v);
847 * Atomically set a 64-bit counter to 0.
850 * A pointer to the atomic counter.
853 rte_atomic64_clear(rte_atomic64_t *v);
855 #endif /* __DOXYGEN__ */
857 #else /*RTE_FORCE_INTRINSICS */
859 /*------------------------- 64 bit atomic operations -------------------------*/
862 * An atomic compare and set function used by the mutex functions.
863 * (atomic) equivalent to:
865 * *dst = src (all 64-bit words)
868 * The destination into which the value will be written.
870 * The expected value.
874 * Non-zero on success; 0 on failure.
877 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
879 return __sync_bool_compare_and_swap(dst, exp, src);
883 * The atomic counter structure.
886 volatile int64_t cnt; /**< Internal counter value. */
890 * Static initializer for an atomic counter.
892 #define RTE_ATOMIC64_INIT(val) { (val) }
895 * Initialize the atomic counter.
898 * A pointer to the atomic counter.
901 rte_atomic64_init(rte_atomic64_t *v)
909 while (success == 0) {
911 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
918 * Atomically read a 64-bit counter.
921 * A pointer to the atomic counter.
923 * The value of the counter.
925 static inline int64_t
926 rte_atomic64_read(rte_atomic64_t *v)
934 while (success == 0) {
936 /* replace the value by itself */
937 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
945 * Atomically set a 64-bit counter.
948 * A pointer to the atomic counter.
950 * The new value of the counter.
953 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
961 while (success == 0) {
963 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
970 * Atomically add a 64-bit value to a counter.
973 * A pointer to the atomic counter.
975 * The value to be added to the counter.
978 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
980 __sync_fetch_and_add(&v->cnt, inc);
984 * Atomically subtract a 64-bit value from a counter.
987 * A pointer to the atomic counter.
989 * The value to be substracted from the counter.
992 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
994 __sync_fetch_and_sub(&v->cnt, dec);
998 * Atomically increment a 64-bit counter by one and test.
1001 * A pointer to the atomic counter.
1004 rte_atomic64_inc(rte_atomic64_t *v)
1006 rte_atomic64_add(v, 1);
1010 * Atomically decrement a 64-bit counter by one and test.
1013 * A pointer to the atomic counter.
1016 rte_atomic64_dec(rte_atomic64_t *v)
1018 rte_atomic64_sub(v, 1);
1022 * Add a 64-bit value to an atomic counter and return the result.
1024 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
1025 * returns the value of v after the addition.
1028 * A pointer to the atomic counter.
1030 * The value to be added to the counter.
1032 * The value of v after the addition.
1034 static inline int64_t
1035 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
1037 return __sync_add_and_fetch(&v->cnt, inc);
1041 * Subtract a 64-bit value from an atomic counter and return the result.
1043 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
1044 * and returns the value of v after the substraction.
1047 * A pointer to the atomic counter.
1049 * The value to be substracted from the counter.
1051 * The value of v after the substraction.
1053 static inline int64_t
1054 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
1056 return __sync_sub_and_fetch(&v->cnt, dec);
1060 * Atomically increment a 64-bit counter by one and test.
1062 * Atomically increments the atomic counter (v) by one and returns
1063 * true if the result is 0, or false in all other cases.
1066 * A pointer to the atomic counter.
1068 * True if the result after the addition is 0; false otherwise.
1070 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
1072 return rte_atomic64_add_return(v, 1) == 0;
1076 * Atomically decrement a 64-bit counter by one and test.
1078 * Atomically decrements the atomic counter (v) by one and returns true if
1079 * the result is 0, or false in all other cases.
1082 * A pointer to the atomic counter.
1084 * True if the result after substraction is 0; false otherwise.
1086 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
1088 return rte_atomic64_sub_return(v, 1) == 0;
1092 * Atomically test and set a 64-bit atomic counter.
1094 * If the counter value is already set, return 0 (failed). Otherwise, set
1095 * the counter value to 1 and return 1 (success).
1098 * A pointer to the atomic counter.
1100 * 0 if failed; else 1, success.
1102 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
1104 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
1108 * Atomically set a 64-bit counter to 0.
1111 * A pointer to the atomic counter.
1113 static inline void rte_atomic64_clear(rte_atomic64_t *v)
1115 rte_atomic64_set(v, 0);
1118 #endif /*RTE_FORCE_INTRINSICS */
1124 #endif /* _RTE_ATOMIC_H_ */