/*
 * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   * Neither the name of Intel Corporation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
34 #ifndef _RTE_ATOMIC_H_
35 #define _RTE_ATOMIC_H_
/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */
45 #include <rte_common.h>
#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);

/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);

#endif /* __DOXYGEN__ */
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
/* __asm__/__volatile__ (rather than asm/volatile) so the macro is also
 * usable in strict ISO modes (-std=c11) where the asm keyword is absent. */
#define rte_compiler_barrier() do {		\
	__asm__ __volatile__ ("" : : : "memory");	\
} while(0)
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* No native 64-bit store on 32-bit targets: loop on compare-and-set
	 * until the (possibly concurrently modified) value is replaced by 0. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	/* No native atomic 64-bit load on 32-bit targets: snapshot via
	 * compare-and-set. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	/* No native atomic 64-bit store on 32-bit targets: loop on
	 * compare-and-set. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
946 #endif /* _RTE_ATOMIC_H_ */