/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
34 #ifndef _RTE_ATOMIC_H_
35 #define _RTE_ATOMIC_H_
41 * This file defines a generic API for atomic operations.
49 * General memory barrier.
51 * Guarantees that the LOAD and STORE operations generated before the
52 * barrier occur before the LOAD and STORE operations generated after.
53 * This function is architecture dependent.
55 static inline void rte_mb(void);
58 * Write memory barrier.
60 * Guarantees that the STORE operations generated before the barrier
61 * occur before the STORE operations generated after.
62 * This function is architecture dependent.
64 static inline void rte_wmb(void);
67 * Read memory barrier.
69 * Guarantees that the LOAD operations generated before the barrier
70 * occur before the LOAD operations generated after.
71 * This function is architecture dependent.
73 static inline void rte_rmb(void);
76 * General memory barrier between lcores
78 * Guarantees that the LOAD and STORE operations that precede the
79 * rte_smp_mb() call are globally visible across the lcores
80 * before the the LOAD and STORE operations that follows it.
82 static inline void rte_smp_mb(void);
85 * Write memory barrier between lcores
87 * Guarantees that the STORE operations that precede the
88 * rte_smp_wmb() call are globally visible across the lcores
89 * before the the STORE operations that follows it.
91 static inline void rte_smp_wmb(void);
94 * Read memory barrier between lcores
96 * Guarantees that the LOAD operations that precede the
97 * rte_smp_rmb() call are globally visible across the lcores
98 * before the the LOAD operations that follows it.
100 static inline void rte_smp_rmb(void);
102 #endif /* __DOXYGEN__ */
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	__asm__ volatile ("" : : : "memory");	\
} while(0)
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* no native 64-bit store on 32-bit targets: use a cmpset loop */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	/* no native 64-bit load on 32-bit targets: use a cmpset loop */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	/* no native 64-bit store on 32-bit targets: use a cmpset loop */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
945 #endif /* _RTE_ATOMIC_H_ */