4 * Copyright(c) 2010-2013 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
34 #ifndef _RTE_ATOMIC_H_
35 #define _RTE_ATOMIC_H_
41 * This file defines a generic API for atomic
42 * operations. The implementation is architecture-specific.
44 * See lib/librte_eal/common/include/i686/arch/rte_atomic.h
45 * See lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
54 #if RTE_MAX_LCORE == 1
55 #define MPLOCKED /**< No need to insert MP lock prefix. */
57 #define MPLOCKED "lock ; " /**< Insert MP lock prefix. */
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
#define rte_mb()  _mm_mfence()

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
#define rte_wmb() _mm_sfence()

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
#define rte_rmb() _mm_lfence()

/* SSE2 intrinsics providing mfence/sfence/lfence (x86 only) */
#include <emmintrin.h>
88 * Atomic Operations on x86_64
91 /*------------------------- 16 bit atomic operations -------------------------*/
94 * Atomic compare and set.
96 * (atomic) equivalent to:
98 * *dst = src (all 16-bit words)
101 * The destination location into which the value will be written.
103 * The expected value.
107 * Non-zero on success; 0 on failure.
110 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
112 #ifndef RTE_FORCE_INTRINSICS
117 "cmpxchgw %[src], %[dst];"
119 : [res] "=a" (res), /* output */
121 : [src] "r" (src), /* input */
124 : "memory"); /* no-clobber list */
127 return __sync_bool_compare_and_swap(dst, exp, src);
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	/* an aligned 16-bit load is atomic on x86 */
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	/* an aligned 16-bit store is atomic on x86 */
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
212 * Atomically increment a counter by one.
215 * A pointer to the atomic counter.
218 rte_atomic16_inc(rte_atomic16_t *v)
220 #ifndef RTE_FORCE_INTRINSICS
224 : [cnt] "=m" (v->cnt) /* output */
225 : "m" (v->cnt) /* input */
228 rte_atomic16_add(v, 1);
233 * Atomically decrement a counter by one.
236 * A pointer to the atomic counter.
239 rte_atomic16_dec(rte_atomic16_t *v)
241 #ifndef RTE_FORCE_INTRINSICS
245 : [cnt] "=m" (v->cnt) /* output */
246 : "m" (v->cnt) /* input */
249 rte_atomic16_sub(v, 1);
254 * Atomically add a 16-bit value to a counter and return the result.
256 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
257 * returns the value of v after addition.
260 * A pointer to the atomic counter.
262 * The value to be added to the counter.
264 * The value of v after the addition.
266 static inline int16_t
267 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
269 return __sync_add_and_fetch(&v->cnt, inc);
273 * Atomically subtract a 16-bit value from a counter and return
276 * Atomically subtracts the 16-bit value (inc) from the atomic counter
277 * (v) and returns the value of v after the subtraction.
280 * A pointer to the atomic counter.
282 * The value to be subtracted from the counter.
284 * The value of v after the subtraction.
286 static inline int16_t
287 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
289 return __sync_sub_and_fetch(&v->cnt, dec);
293 * Atomically increment a 16-bit counter by one and test.
295 * Atomically increments the atomic counter (v) by one and returns true if
296 * the result is 0, or false in all other cases.
299 * A pointer to the atomic counter.
301 * True if the result after the increment operation is 0; false otherwise.
303 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
305 #ifndef RTE_FORCE_INTRINSICS
312 : [cnt] "+m" (v->cnt), /* output */
317 return (__sync_add_and_fetch(&v->cnt, 1) == 0);
322 * Atomically decrement a 16-bit counter by one and test.
324 * Atomically decrements the atomic counter (v) by one and returns true if
325 * the result is 0, or false in all other cases.
328 * A pointer to the atomic counter.
330 * True if the result after the decrement operation is 0; false otherwise.
332 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
334 #ifndef RTE_FORCE_INTRINSICS
337 asm volatile(MPLOCKED
340 : [cnt] "+m" (v->cnt), /* output */
345 return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
350 * Atomically test and set a 16-bit atomic counter.
352 * If the counter value is already set, return 0 (failed). Otherwise, set
353 * the counter value to 1 and return 1 (success).
356 * A pointer to the atomic counter.
358 * 0 if failed; else 1, success.
360 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
362 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
366 * Atomically set a 16-bit counter to 0.
369 * A pointer to the atomic counter.
371 static inline void rte_atomic16_clear(rte_atomic16_t *v)
376 /*------------------------- 32 bit atomic operations -------------------------*/
379 * Atomic compare and set.
381 * (atomic) equivalent to:
383 * *dst = src (all 32-bit words)
386 * The destination location into which the value will be written.
388 * The expected value.
392 * Non-zero on success; 0 on failure.
395 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
397 #ifndef RTE_FORCE_INTRINSICS
402 "cmpxchgl %[src], %[dst];"
404 : [res] "=a" (res), /* output */
406 : [src] "r" (src), /* input */
409 : "memory"); /* no-clobber list */
412 return __sync_bool_compare_and_swap(dst, exp, src);
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	/* an aligned 32-bit load is atomic on x86 */
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	/* an aligned 32-bit store is atomic on x86 */
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
497 * Atomically increment a counter by one.
500 * A pointer to the atomic counter.
503 rte_atomic32_inc(rte_atomic32_t *v)
505 #ifndef RTE_FORCE_INTRINSICS
509 : [cnt] "=m" (v->cnt) /* output */
510 : "m" (v->cnt) /* input */
513 rte_atomic32_add(v, 1);
518 * Atomically decrement a counter by one.
521 * A pointer to the atomic counter.
524 rte_atomic32_dec(rte_atomic32_t *v)
526 #ifndef RTE_FORCE_INTRINSICS
530 : [cnt] "=m" (v->cnt) /* output */
531 : "m" (v->cnt) /* input */
534 rte_atomic32_sub(v,1);
539 * Atomically add a 32-bit value to a counter and return the result.
541 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
542 * returns the value of v after addition.
545 * A pointer to the atomic counter.
547 * The value to be added to the counter.
549 * The value of v after the addition.
551 static inline int32_t
552 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
554 return __sync_add_and_fetch(&v->cnt, inc);
558 * Atomically subtract a 32-bit value from a counter and return
561 * Atomically subtracts the 32-bit value (inc) from the atomic counter
562 * (v) and returns the value of v after the subtraction.
565 * A pointer to the atomic counter.
567 * The value to be subtracted from the counter.
569 * The value of v after the subtraction.
571 static inline int32_t
572 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
574 return __sync_sub_and_fetch(&v->cnt, dec);
578 * Atomically increment a 32-bit counter by one and test.
580 * Atomically increments the atomic counter (v) by one and returns true if
581 * the result is 0, or false in all other cases.
584 * A pointer to the atomic counter.
586 * True if the result after the increment operation is 0; false otherwise.
588 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
590 #ifndef RTE_FORCE_INTRINSICS
597 : [cnt] "+m" (v->cnt), /* output */
602 return (__sync_add_and_fetch(&v->cnt, 1) == 0);
607 * Atomically decrement a 32-bit counter by one and test.
609 * Atomically decrements the atomic counter (v) by one and returns true if
610 * the result is 0, or false in all other cases.
613 * A pointer to the atomic counter.
615 * True if the result after the decrement operation is 0; false otherwise.
617 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
619 #ifndef RTE_FORCE_INTRINSICS
622 asm volatile(MPLOCKED
625 : [cnt] "+m" (v->cnt), /* output */
630 return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
635 * Atomically test and set a 32-bit atomic counter.
637 * If the counter value is already set, return 0 (failed). Otherwise, set
638 * the counter value to 1 and return 1 (success).
641 * A pointer to the atomic counter.
643 * 0 if failed; else 1, success.
645 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
647 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
651 * Atomically set a 32-bit counter to 0.
654 * A pointer to the atomic counter.
656 static inline void rte_atomic32_clear(rte_atomic32_t *v)
661 #ifndef RTE_FORCE_INTRINSICS
662 /* any other functions are in arch specific files */
663 #include "arch/rte_atomic.h"
668 /*------------------------- 64 bit atomic operations -------------------------*/
671 * An atomic compare and set function used by the mutex functions.
672 * (atomic) equivalent to:
674 * *dst = src (all 64-bit words)
677 * The destination into which the value will be written.
679 * The expected value.
683 * Non-zero on success; 0 on failure.
686 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
689 * The atomic counter structure.
692 volatile int64_t cnt; /**< Internal counter value. */
696 * Static initializer for an atomic counter.
698 #define RTE_ATOMIC64_INIT(val) { (val) }
701 * Initialize the atomic counter.
704 * A pointer to the atomic counter.
707 rte_atomic64_init(rte_atomic64_t *v);
710 * Atomically read a 64-bit counter.
713 * A pointer to the atomic counter.
715 * The value of the counter.
717 static inline int64_t
718 rte_atomic64_read(rte_atomic64_t *v);
721 * Atomically set a 64-bit counter.
724 * A pointer to the atomic counter.
726 * The new value of the counter.
729 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
732 * Atomically add a 64-bit value to a counter.
735 * A pointer to the atomic counter.
737 * The value to be added to the counter.
740 rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
743 * Atomically subtract a 64-bit value from a counter.
746 * A pointer to the atomic counter.
748 * The value to be subtracted from the counter.
751 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
754 * Atomically increment a 64-bit counter by one and test.
757 * A pointer to the atomic counter.
760 rte_atomic64_inc(rte_atomic64_t *v);
763 * Atomically decrement a 64-bit counter by one and test.
766 * A pointer to the atomic counter.
769 rte_atomic64_dec(rte_atomic64_t *v);
772 * Add a 64-bit value to an atomic counter and return the result.
774 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
775 * returns the value of v after the addition.
778 * A pointer to the atomic counter.
780 * The value to be added to the counter.
782 * The value of v after the addition.
784 static inline int64_t
785 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
788 * Subtract a 64-bit value from an atomic counter and return the result.
790 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
791 * and returns the value of v after the subtraction.
794 * A pointer to the atomic counter.
796 * The value to be subtracted from the counter.
798 * The value of v after the subtraction.
800 static inline int64_t
801 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
804 * Atomically increment a 64-bit counter by one and test.
806 * Atomically increments the atomic counter (v) by one and returns
807 * true if the result is 0, or false in all other cases.
810 * A pointer to the atomic counter.
812 * True if the result after the addition is 0; false otherwise.
815 rte_atomic64_inc_and_test(rte_atomic64_t *v);
818 * Atomically decrement a 64-bit counter by one and test.
820 * Atomically decrements the atomic counter (v) by one and returns true if
821 * the result is 0, or false in all other cases.
824 * A pointer to the atomic counter.
826 * True if the result after subtraction is 0; false otherwise.
829 rte_atomic64_dec_and_test(rte_atomic64_t *v);
832 * Atomically test and set a 64-bit atomic counter.
834 * If the counter value is already set, return 0 (failed). Otherwise, set
835 * the counter value to 1 and return 1 (success).
838 * A pointer to the atomic counter.
840 * 0 if failed; else 1, success.
843 rte_atomic64_test_and_set(rte_atomic64_t *v);
846 * Atomically set a 64-bit counter to 0.
849 * A pointer to the atomic counter.
852 rte_atomic64_clear(rte_atomic64_t *v);
854 #endif /* __DOXYGEN__ */
856 #else /*RTE_FORCE_INTRINSICS */
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* a plain 64-bit store is not atomic on 32-bit targets:
	 * emulate it with a compare-and-set loop */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
1117 #endif /*RTE_FORCE_INTRINSICS */
1123 #endif /* _RTE_ATOMIC_H_ */