/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#ifdef __DOXYGEN__
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);
/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);
/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);

#endif /* __DOXYGEN__ */
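/*
 * Usage sketch (illustrative only, not part of this API): pairing
 * rte_wmb()/rte_rmb() for a single-producer flag handshake. The names
 * `data` and `ready` are hypothetical.
 *
 *   static uint32_t data;
 *   static volatile uint32_t ready;
 *
 *   void producer(void)
 *   {
 *       data = 42;
 *       rte_wmb();        // data store is visible before the flag store
 *       ready = 1;
 *   }
 *
 *   void consumer(void)
 *   {
 *       while (ready == 0)
 *           ;
 *       rte_rmb();        // flag load is ordered before the data load
 *       // here data == 42 is guaranteed
 *   }
 */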
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while(0)
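/*
 * Note the distinction: rte_compiler_barrier() only constrains the
 * compiler; the CPU may still reorder the accesses at run time. A
 * minimal sketch (the `entry` fields are hypothetical) where a
 * compile-time fence is the intent, e.g. data consumed on the same
 * core by a signal handler:
 *
 *   entry->val = v;
 *   rte_compiler_barrier();  // keep the val store before the seq store
 *   entry->seq = s;          //   in the emitted instruction stream
 *
 * When another core observes the data, a real barrier (rte_wmb() or
 * rte_mb()) is required instead.
 */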
/*------------------------- 16 bit atomic operations -------------------------*/
/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
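/*
 * Typical cmpset usage is a retry loop that recomputes the new value
 * from a fresh snapshot until no other thread raced in between. A
 * minimal sketch, assuming a hypothetical saturating counter:
 *
 *   static inline void
 *   sat_inc16(volatile uint16_t *p, uint16_t max)
 *   {
 *       uint16_t old;
 *
 *       do {
 *           old = *p;
 *           if (old == max)
 *               return;          // already saturated, nothing to do
 *       } while (rte_atomic16_cmpset(p, old, old + 1) == 0);
 *   }
 */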
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;
/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}
/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}
/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}
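/*
 * Counter lifetime sketch: a counter can be initialized either
 * statically with RTE_ATOMIC16_INIT or at run time with
 * rte_atomic16_init(); `pkts` is a hypothetical name.
 *
 *   static rte_atomic16_t pkts = RTE_ATOMIC16_INIT(0);
 *
 *   rte_atomic16_set(&pkts, 10);
 *   int16_t n = rte_atomic16_read(&pkts);   // n == 10
 */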
/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif
/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif
/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
/**
 * Atomically subtract a 16-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return (__sync_add_and_fetch(&v->cnt, 1) == 0);
}
#endif
/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
}
#endif
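/*
 * dec_and_test is the classic building block for reference counting:
 * exactly one thread observes the transition to zero and releases the
 * object. A sketch with a hypothetical `struct obj`:
 *
 *   struct obj {
 *       rte_atomic16_t refcnt;
 *       // ... payload ...
 *   };
 *
 *   void obj_put(struct obj *o)
 *   {
 *       if (rte_atomic16_dec_and_test(&o->refcnt))
 *           free(o);    // last reference dropped; safe to release
 *   }
 */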
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif
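/*
 * test_and_set/clear give a rudimentary try-lock. A sketch with a
 * hypothetical flag; unlike a real spinlock there is no waiting or
 * fairness, and on weakly ordered CPUs the release needs a barrier:
 *
 *   static rte_atomic16_t busy = RTE_ATOMIC16_INIT(0);
 *
 *   if (rte_atomic16_test_and_set(&busy)) {
 *       // we won the 0 -> 1 transition: do the exclusive work
 *       rte_wmb();                    // order the work before the release
 *       rte_atomic16_clear(&busy);    // release for the next taker
 *   }
 */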
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}
/*------------------------- 32 bit atomic operations -------------------------*/
/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;
/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}
/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}
/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}
/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif
/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif
/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
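/*
 * add_return returns the post-add value, which makes it handy for
 * handing out unique sequence numbers or slot indexes; `next_id` is a
 * hypothetical counter:
 *
 *   static rte_atomic32_t next_id = RTE_ATOMIC32_INIT(0);
 *
 *   uint32_t my_id = (uint32_t)rte_atomic32_add_return(&next_id, 1) - 1;
 *   // each caller gets a distinct my_id: 0, 1, 2, ...
 */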
/**
 * Atomically subtract a 32-bit value from a counter and return
 * the difference.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return (__sync_add_and_fetch(&v->cnt, 1) == 0);
}
#endif
/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
}
#endif
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif
/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}
/*------------------------- 64 bit atomic operations -------------------------*/
/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;
/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
        int success = 0;
        uint64_t tmp;

        /* a 64-bit store is not atomic on 32-bit targets, so build the
         * initialization from a compare-and-set loop */
        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
}
#endif
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
}
#endif
/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
}
#endif
/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif
/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif
/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif
/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif
/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif
/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif
/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif
/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif
/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif
/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif
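/*
 * The 64-bit counters are the natural fit for statistics that would
 * overflow 32 bits quickly at line rate; `rx_bytes` is a hypothetical
 * per-port counter:
 *
 *   static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 *   // datapath: accumulate from any lcore without a lock
 *   rte_atomic64_add(&rx_bytes, pkt_len);
 *
 *   // control path: read the running total from any thread
 *   int64_t total = rte_atomic64_read(&rx_bytes);
 */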
#endif /* _RTE_ATOMIC_H_ */