/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2010-2012 Intel Corporation. All rights reserved.
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Inspired from FreeBSD src/sys/amd64/include/atomic.h
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */
41 #ifndef _RTE_ATOMIC_H_
42 #error "don't include this file directly, please include generic <rte_atomic.h>"
45 #ifndef _RTE_X86_64_ATOMIC_H_
46 #define _RTE_X86_64_ATOMIC_H_
/**
 * @file
 * Atomic Operations on x86_64
 */

/* On a single-lcore build the bus-lock prefix is unnecessary: there is no
 * other agent to race with, so the plain instruction is already atomic
 * with respect to this core. Otherwise prepend "lock" to make the
 * read-modify-write instructions atomic across cores. */
#if RTE_MAX_LCORE == 1
#define MPLOCKED                        /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
#endif
60 * General memory barrier.
62 * Guarantees that the LOAD and STORE operations generated before the
63 * barrier occur before the LOAD and STORE operations generated after.
65 #define rte_mb() asm volatile("mfence;" : : : "memory")
68 * Write memory barrier.
70 * Guarantees that the STORE operations generated before the barrier
71 * occur before the STORE operations generated after.
73 #define rte_wmb() asm volatile("sfence;" : : : "memory")
76 * Read memory barrier.
78 * Guarantees that the LOAD operations generated before the barrier
79 * occur before the LOAD operations generated after.
81 #define rte_rmb() asm volatile("lfence;" : : : "memory")
83 /*------------------------- 16 bit atomic operations -------------------------*/
86 * Atomic compare and set.
88 * (atomic) equivalent to:
90 * *dst = src (all 16-bit words)
93 * The destination location into which the value will be written.
99 * Non-zero on success; 0 on failure.
102 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
108 "cmpxchgw %[src], %[dst];"
110 : [res] "=a" (res), /* output */
112 : [src] "r" (src), /* input */
115 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   Initial value for the counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
132 * Initialize an atomic counter.
135 * A pointer to the atomic counter.
138 rte_atomic16_init(rte_atomic16_t *v)
144 * Atomically read a 16-bit value from a counter.
147 * A pointer to the atomic counter.
149 * The value of the counter.
151 static inline int16_t
152 rte_atomic16_read(const rte_atomic16_t *v)
158 * Atomically set a counter to a 16-bit value.
161 * A pointer to the atomic counter.
163 * The new value for the counter.
166 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
172 * Atomically add a 16-bit value to an atomic counter.
175 * A pointer to the atomic counter.
177 * The value to be added to the counter.
180 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
184 "addw %[inc], %[cnt]"
185 : [cnt] "=m" (v->cnt) /* output */
186 : [inc] "ir" (inc), /* input */
192 * Atomically subtract a 16-bit value from an atomic counter.
195 * A pointer to the atomic counter.
197 * The value to be subtracted from the counter.
200 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
204 "subw %[dec], %[cnt]"
205 : [cnt] "=m" (v->cnt) /* output */
206 : [dec] "ir" (dec), /* input */
212 * Atomically increment a counter by one.
215 * A pointer to the atomic counter.
218 rte_atomic16_inc(rte_atomic16_t *v)
223 : [cnt] "=m" (v->cnt) /* output */
224 : "m" (v->cnt) /* input */
229 * Atomically decrement a counter by one.
232 * A pointer to the atomic counter.
235 rte_atomic16_dec(rte_atomic16_t *v)
240 : [cnt] "=m" (v->cnt) /* output */
241 : "m" (v->cnt) /* input */
246 * Atomically add a 16-bit value to a counter and return the result.
248 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
249 * returns the value of v after addition.
252 * A pointer to the atomic counter.
254 * The value to be added to the counter.
256 * The value of v after the addition.
258 static inline int16_t
259 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
265 "xaddw %[prev], %[cnt]"
266 : [prev] "+r" (prev), /* output */
268 : "m" (v->cnt) /* input */
270 return (int16_t)(prev + inc);
274 * Atomically subtract a 16-bit value from a counter and return
277 * Atomically subtracts the 16-bit value (inc) from the atomic counter
278 * (v) and returns the value of v after the subtraction.
281 * A pointer to the atomic counter.
283 * The value to be subtracted from the counter.
285 * The value of v after the subtraction.
287 static inline int16_t
288 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
290 return rte_atomic16_add_return(v, (int16_t)-dec);
294 * Atomically increment a 16-bit counter by one and test.
296 * Atomically increments the atomic counter (v) by one and returns true if
297 * the result is 0, or false in all other cases.
300 * A pointer to the atomic counter.
302 * True if the result after the increment operation is 0; false otherwise.
304 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
312 : [cnt] "+m" (v->cnt), /* output */
319 * Atomically decrement a 16-bit counter by one and test.
321 * Atomically decrements the atomic counter (v) by one and returns true if
322 * the result is 0, or false in all other cases.
325 * A pointer to the atomic counter.
327 * True if the result after the decrement operation is 0; false otherwise.
329 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
333 asm volatile(MPLOCKED
336 : [cnt] "+m" (v->cnt), /* output */
343 * Atomically test and set a 16-bit atomic counter.
345 * If the counter value is already set, return 0 (failed). Otherwise, set
346 * the counter value to 1 and return 1 (success).
349 * A pointer to the atomic counter.
351 * 0 if failed; else 1, success.
353 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
355 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
359 * Atomically set a 16-bit counter to 0.
362 * A pointer to the atomic counter.
364 static inline void rte_atomic16_clear(rte_atomic16_t *v)
369 /*------------------------- 32 bit atomic operations -------------------------*/
372 * Atomic compare and set.
374 * (atomic) equivalent to:
376 * *dst = src (all 32-bit words)
379 * The destination location into which the value will be written.
381 * The expected value.
385 * Non-zero on success; 0 on failure.
388 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
394 "cmpxchgl %[src], %[dst];"
396 : [res] "=a" (res), /* output */
398 : [src] "r" (src), /* input */
401 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   Initial value for the counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
418 * Initialize an atomic counter.
421 * A pointer to the atomic counter.
424 rte_atomic32_init(rte_atomic32_t *v)
430 * Atomically read a 32-bit value from a counter.
433 * A pointer to the atomic counter.
435 * The value of the counter.
437 static inline int32_t
438 rte_atomic32_read(const rte_atomic32_t *v)
444 * Atomically set a counter to a 32-bit value.
447 * A pointer to the atomic counter.
449 * The new value for the counter.
452 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
458 * Atomically add a 32-bit value to an atomic counter.
461 * A pointer to the atomic counter.
463 * The value to be added to the counter.
466 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
470 "addl %[inc], %[cnt]"
471 : [cnt] "=m" (v->cnt) /* output */
472 : [inc] "ir" (inc), /* input */
478 * Atomically subtract a 32-bit value from an atomic counter.
481 * A pointer to the atomic counter.
483 * The value to be subtracted from the counter.
486 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
490 "subl %[dec], %[cnt]"
491 : [cnt] "=m" (v->cnt) /* output */
492 : [dec] "ir" (dec), /* input */
498 * Atomically increment a counter by one.
501 * A pointer to the atomic counter.
504 rte_atomic32_inc(rte_atomic32_t *v)
509 : [cnt] "=m" (v->cnt) /* output */
510 : "m" (v->cnt) /* input */
515 * Atomically decrement a counter by one.
518 * A pointer to the atomic counter.
521 rte_atomic32_dec(rte_atomic32_t *v)
526 : [cnt] "=m" (v->cnt) /* output */
527 : "m" (v->cnt) /* input */
532 * Atomically add a 32-bit value to a counter and return the result.
534 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
535 * returns the value of v after addition.
538 * A pointer to the atomic counter.
540 * The value to be added to the counter.
542 * The value of v after the addition.
544 static inline int32_t
545 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
551 "xaddl %[prev], %[cnt]"
552 : [prev] "+r" (prev), /* output */
554 : "m" (v->cnt) /* input */
556 return (int32_t)(prev + inc);
560 * Atomically subtract a 32-bit value from a counter and return
563 * Atomically subtracts the 32-bit value (inc) from the atomic counter
564 * (v) and returns the value of v after the subtraction.
567 * A pointer to the atomic counter.
569 * The value to be subtracted from the counter.
571 * The value of v after the subtraction.
573 static inline int32_t
574 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
576 return rte_atomic32_add_return(v, -dec);
580 * Atomically increment a 32-bit counter by one and test.
582 * Atomically increments the atomic counter (v) by one and returns true if
583 * the result is 0, or false in all other cases.
586 * A pointer to the atomic counter.
588 * True if the result after the increment operation is 0; false otherwise.
590 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
598 : [cnt] "+m" (v->cnt), /* output */
605 * Atomically decrement a 32-bit counter by one and test.
607 * Atomically decrements the atomic counter (v) by one and returns true if
608 * the result is 0, or false in all other cases.
611 * A pointer to the atomic counter.
613 * True if the result after the decrement operation is 0; false otherwise.
615 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
619 asm volatile(MPLOCKED
622 : [cnt] "+m" (v->cnt), /* output */
629 * Atomically test and set a 32-bit atomic counter.
631 * If the counter value is already set, return 0 (failed). Otherwise, set
632 * the counter value to 1 and return 1 (success).
635 * A pointer to the atomic counter.
637 * 0 if failed; else 1, success.
639 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
641 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
645 * Atomically set a 32-bit counter to 0.
648 * A pointer to the atomic counter.
650 static inline void rte_atomic32_clear(rte_atomic32_t *v)
655 /*------------------------- 64 bit atomic operations -------------------------*/
658 * An atomic compare and set function used by the mutex functions.
659 * (atomic) equivalent to:
661 * *dst = src (all 64-bit words)
664 * The destination into which the value will be written.
666 * The expected value.
670 * Non-zero on success; 0 on failure.
673 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
679 "cmpxchgq %[src], %[dst];"
681 : [res] "=a" (res), /* output */
683 : [src] "r" (src), /* input */
686 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   Initial value for the counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
704 * Initialize the atomic counter.
707 * A pointer to the atomic counter.
710 rte_atomic64_init(rte_atomic64_t *v)
716 * Atomically read a 64-bit counter.
719 * A pointer to the atomic counter.
721 * The value of the counter.
723 static inline int64_t
724 rte_atomic64_read(rte_atomic64_t *v)
730 * Atomically set a 64-bit counter.
733 * A pointer to the atomic counter.
735 * The new value of the counter.
738 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
744 * Atomically add a 64-bit value to a counter.
747 * A pointer to the atomic counter.
749 * The value to be added to the counter.
752 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
756 "addq %[inc], %[cnt]"
757 : [cnt] "=m" (v->cnt) /* output */
758 : [inc] "ir" (inc), /* input */
764 * Atomically subtract a 64-bit value from a counter.
767 * A pointer to the atomic counter.
769 * The value to be subtracted from the counter.
772 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
776 "subq %[dec], %[cnt]"
777 : [cnt] "=m" (v->cnt) /* output */
778 : [dec] "ir" (dec), /* input */
784 * Atomically increment a 64-bit counter by one and test.
787 * A pointer to the atomic counter.
790 rte_atomic64_inc(rte_atomic64_t *v)
795 : [cnt] "=m" (v->cnt) /* output */
796 : "m" (v->cnt) /* input */
801 * Atomically decrement a 64-bit counter by one and test.
804 * A pointer to the atomic counter.
807 rte_atomic64_dec(rte_atomic64_t *v)
812 : [cnt] "=m" (v->cnt) /* output */
813 : "m" (v->cnt) /* input */
818 * Add a 64-bit value to an atomic counter and return the result.
820 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
821 * returns the value of v after the addition.
824 * A pointer to the atomic counter.
826 * The value to be added to the counter.
828 * The value of v after the addition.
830 static inline int64_t
831 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
837 "xaddq %[prev], %[cnt]"
838 : [prev] "+r" (prev), /* output */
840 : "m" (v->cnt) /* input */
846 * Subtract a 64-bit value from an atomic counter and return the result.
848 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
849 * and returns the value of v after the subtraction.
852 * A pointer to the atomic counter.
854 * The value to be subtracted from the counter.
856 * The value of v after the subtraction.
858 static inline int64_t
859 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
861 return rte_atomic64_add_return(v, -dec);
865 * Atomically increment a 64-bit counter by one and test.
867 * Atomically increments the atomic counter (v) by one and returns
868 * true if the result is 0, or false in all other cases.
871 * A pointer to the atomic counter.
873 * True if the result after the addition is 0; false otherwise.
875 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
883 : [cnt] "+m" (v->cnt), /* output */
891 * Atomically decrement a 64-bit counter by one and test.
893 * Atomically decrements the atomic counter (v) by one and returns true if
894 * the result is 0, or false in all other cases.
897 * A pointer to the atomic counter.
899 * True if the result after subtraction is 0; false otherwise.
901 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
909 : [cnt] "+m" (v->cnt), /* output */
916 * Atomically test and set a 64-bit atomic counter.
918 * If the counter value is already set, return 0 (failed). Otherwise, set
919 * the counter value to 1 and return 1 (success).
922 * A pointer to the atomic counter.
924 * 0 if failed; else 1, success.
926 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
928 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
932 * Atomically set a 64-bit counter to 0.
935 * A pointer to the atomic counter.
937 static inline void rte_atomic64_clear(rte_atomic64_t *v)
942 #endif /* _RTE_X86_64_ATOMIC_H_ */