4 * Copyright(c) 2010-2012 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 * version: DPDK.L.1.2.3-3
37 * Inspired from FreeBSD src/sys/amd64/include/atomic.h
38 * Copyright (c) 1998 Doug Rabson
39 * All rights reserved.
42 #ifndef _RTE_ATOMIC_H_
43 #error "don't include this file directly, please include generic <rte_atomic.h>"
46 #ifndef _RTE_X86_64_ATOMIC_H_
47 #define _RTE_X86_64_ATOMIC_H_
/**
 * @file
 * Atomic Operations on x86_64.
 *
 * The "lock" instruction prefix makes read-modify-write instructions
 * atomic with respect to other processors; it is unnecessary (and
 * costly) when only one lcore can run, so it is compiled out then.
 */
#if RTE_MAX_LCORE == 1
#define MPLOCKED                 /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED "lock ; "       /**< Insert MP lock prefix. */
#endif
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * The "memory" clobber additionally stops the compiler from reordering
 * accesses across the barrier.
 */
#define rte_mb() asm volatile("mfence;" : : : "memory")
/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
#define rte_wmb() asm volatile("sfence;" : : : "memory")
/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
#define rte_rmb() asm volatile("lfence;" : : : "memory")
84 /*------------------------- 16 bit atomic operations -------------------------*/
87 * Atomic compare and set.
89 * (atomic) equivalent to:
91 * *dst = src (all 16-bit words)
94 * The destination location into which the value will be written.
100 * Non-zero on success; 0 on failure.
103 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
109 "cmpxchgw %[src], %[dst];"
111 : [res] "=a" (res), /* output */
113 : [src] "r" (src), /* input */
116 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}
145 * Atomically read a 16-bit value from a counter.
148 * A pointer to the atomic counter.
150 * The value of the counter.
152 static inline int16_t
153 rte_atomic16_read(const rte_atomic16_t *v)
159 * Atomically set a counter to a 16-bit value.
162 * A pointer to the atomic counter.
164 * The new value for the counter.
167 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
173 * Atomically add a 16-bit value to an atomic counter.
176 * A pointer to the atomic counter.
178 * The value to be added to the counter.
181 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
185 "addw %[inc], %[cnt]"
186 : [cnt] "=m" (v->cnt) /* output */
187 : [inc] "ir" (inc), /* input */
193 * Atomically subtract a 16-bit value from an atomic counter.
196 * A pointer to the atomic counter.
198 * The value to be subtracted from the counter.
201 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
205 "subw %[dec], %[cnt]"
206 : [cnt] "=m" (v->cnt) /* output */
207 : [dec] "ir" (dec), /* input */
213 * Atomically increment a counter by one.
216 * A pointer to the atomic counter.
219 rte_atomic16_inc(rte_atomic16_t *v)
224 : [cnt] "=m" (v->cnt) /* output */
225 : "m" (v->cnt) /* input */
230 * Atomically decrement a counter by one.
233 * A pointer to the atomic counter.
236 rte_atomic16_dec(rte_atomic16_t *v)
241 : [cnt] "=m" (v->cnt) /* output */
242 : "m" (v->cnt) /* input */
247 * Atomically add a 16-bit value to a counter and return the result.
249 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
250 * returns the value of v after addition.
253 * A pointer to the atomic counter.
255 * The value to be added to the counter.
257 * The value of v after the addition.
259 static inline int16_t
260 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
266 "xaddw %[prev], %[cnt]"
267 : [prev] "+r" (prev), /* output */
269 : "m" (v->cnt) /* input */
271 return (int16_t)(prev + inc);
275 * Atomically subtract a 16-bit value from a counter and return
278 * Atomically subtracts the 16-bit value (inc) from the atomic counter
279 * (v) and returns the value of v after the subtraction.
282 * A pointer to the atomic counter.
284 * The value to be subtracted from the counter.
286 * The value of v after the subtraction.
288 static inline int16_t
289 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
291 return rte_atomic16_add_return(v, (int16_t)-dec);
295 * Atomically increment a 16-bit counter by one and test.
297 * Atomically increments the atomic counter (v) by one and returns true if
298 * the result is 0, or false in all other cases.
301 * A pointer to the atomic counter.
303 * True if the result after the increment operation is 0; false otherwise.
305 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
313 : [cnt] "+m" (v->cnt), /* output */
320 * Atomically decrement a 16-bit counter by one and test.
322 * Atomically decrements the atomic counter (v) by one and returns true if
323 * the result is 0, or false in all other cases.
326 * A pointer to the atomic counter.
328 * True if the result after the decrement operation is 0; false otherwise.
330 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
334 asm volatile(MPLOCKED
337 : [cnt] "+m" (v->cnt), /* output */
344 * Atomically test and set a 16-bit atomic counter.
346 * If the counter value is already set, return 0 (failed). Otherwise, set
347 * the counter value to 1 and return 1 (success).
350 * A pointer to the atomic counter.
352 * 0 if failed; else 1, success.
354 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
356 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
360 * Atomically set a 16-bit counter to 0.
363 * A pointer to the atomic counter.
365 static inline void rte_atomic16_clear(rte_atomic16_t *v)
370 /*------------------------- 32 bit atomic operations -------------------------*/
373 * Atomic compare and set.
375 * (atomic) equivalent to:
377 * *dst = src (all 32-bit words)
380 * The destination location into which the value will be written.
382 * The expected value.
386 * Non-zero on success; 0 on failure.
389 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
395 "cmpxchgl %[src], %[dst];"
397 : [res] "=a" (res), /* output */
399 : [src] "r" (src), /* input */
402 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}
431 * Atomically read a 32-bit value from a counter.
434 * A pointer to the atomic counter.
436 * The value of the counter.
438 static inline int32_t
439 rte_atomic32_read(const rte_atomic32_t *v)
445 * Atomically set a counter to a 32-bit value.
448 * A pointer to the atomic counter.
450 * The new value for the counter.
453 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
459 * Atomically add a 32-bit value to an atomic counter.
462 * A pointer to the atomic counter.
464 * The value to be added to the counter.
467 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
471 "addl %[inc], %[cnt]"
472 : [cnt] "=m" (v->cnt) /* output */
473 : [inc] "ir" (inc), /* input */
479 * Atomically subtract a 32-bit value from an atomic counter.
482 * A pointer to the atomic counter.
484 * The value to be subtracted from the counter.
487 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
491 "subl %[dec], %[cnt]"
492 : [cnt] "=m" (v->cnt) /* output */
493 : [dec] "ir" (dec), /* input */
499 * Atomically increment a counter by one.
502 * A pointer to the atomic counter.
505 rte_atomic32_inc(rte_atomic32_t *v)
510 : [cnt] "=m" (v->cnt) /* output */
511 : "m" (v->cnt) /* input */
516 * Atomically decrement a counter by one.
519 * A pointer to the atomic counter.
522 rte_atomic32_dec(rte_atomic32_t *v)
527 : [cnt] "=m" (v->cnt) /* output */
528 : "m" (v->cnt) /* input */
533 * Atomically add a 32-bit value to a counter and return the result.
535 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
536 * returns the value of v after addition.
539 * A pointer to the atomic counter.
541 * The value to be added to the counter.
543 * The value of v after the addition.
545 static inline int32_t
546 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
552 "xaddl %[prev], %[cnt]"
553 : [prev] "+r" (prev), /* output */
555 : "m" (v->cnt) /* input */
557 return (int32_t)(prev + inc);
561 * Atomically subtract a 32-bit value from a counter and return
564 * Atomically subtracts the 32-bit value (inc) from the atomic counter
565 * (v) and returns the value of v after the subtraction.
568 * A pointer to the atomic counter.
570 * The value to be subtracted from the counter.
572 * The value of v after the subtraction.
574 static inline int32_t
575 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
577 return rte_atomic32_add_return(v, -dec);
581 * Atomically increment a 32-bit counter by one and test.
583 * Atomically increments the atomic counter (v) by one and returns true if
584 * the result is 0, or false in all other cases.
587 * A pointer to the atomic counter.
589 * True if the result after the increment operation is 0; false otherwise.
591 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
599 : [cnt] "+m" (v->cnt), /* output */
606 * Atomically decrement a 32-bit counter by one and test.
608 * Atomically decrements the atomic counter (v) by one and returns true if
609 * the result is 0, or false in all other cases.
612 * A pointer to the atomic counter.
614 * True if the result after the decrement operation is 0; false otherwise.
616 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
620 asm volatile(MPLOCKED
623 : [cnt] "+m" (v->cnt), /* output */
630 * Atomically test and set a 32-bit atomic counter.
632 * If the counter value is already set, return 0 (failed). Otherwise, set
633 * the counter value to 1 and return 1 (success).
636 * A pointer to the atomic counter.
638 * 0 if failed; else 1, success.
640 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
642 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
646 * Atomically set a 32-bit counter to 0.
649 * A pointer to the atomic counter.
651 static inline void rte_atomic32_clear(rte_atomic32_t *v)
656 /*------------------------- 64 bit atomic operations -------------------------*/
659 * An atomic compare and set function used by the mutex functions.
660 * (atomic) equivalent to:
662 * *dst = src (all 64-bit words)
665 * The destination into which the value will be written.
667 * The expected value.
671 * Non-zero on success; 0 on failure.
674 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
680 "cmpxchgq %[src], %[dst];"
682 : [res] "=a" (res), /* output */
684 : [src] "r" (src), /* input */
687 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	v->cnt = 0;
}
717 * Atomically read a 64-bit counter.
720 * A pointer to the atomic counter.
722 * The value of the counter.
724 static inline int64_t
725 rte_atomic64_read(rte_atomic64_t *v)
731 * Atomically set a 64-bit counter.
734 * A pointer to the atomic counter.
736 * The new value of the counter.
739 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
745 * Atomically add a 64-bit value to a counter.
748 * A pointer to the atomic counter.
750 * The value to be added to the counter.
753 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
757 "addq %[inc], %[cnt]"
758 : [cnt] "=m" (v->cnt) /* output */
759 : [inc] "ir" (inc), /* input */
765 * Atomically subtract a 64-bit value from a counter.
768 * A pointer to the atomic counter.
770 * The value to be subtracted from the counter.
773 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
777 "subq %[dec], %[cnt]"
778 : [cnt] "=m" (v->cnt) /* output */
779 : [dec] "ir" (dec), /* input */
785 * Atomically increment a 64-bit counter by one and test.
788 * A pointer to the atomic counter.
791 rte_atomic64_inc(rte_atomic64_t *v)
796 : [cnt] "=m" (v->cnt) /* output */
797 : "m" (v->cnt) /* input */
802 * Atomically decrement a 64-bit counter by one and test.
805 * A pointer to the atomic counter.
808 rte_atomic64_dec(rte_atomic64_t *v)
813 : [cnt] "=m" (v->cnt) /* output */
814 : "m" (v->cnt) /* input */
819 * Add a 64-bit value to an atomic counter and return the result.
821 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
822 * returns the value of v after the addition.
825 * A pointer to the atomic counter.
827 * The value to be added to the counter.
829 * The value of v after the addition.
831 static inline int64_t
832 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
838 "xaddq %[prev], %[cnt]"
839 : [prev] "+r" (prev), /* output */
841 : "m" (v->cnt) /* input */
847 * Subtract a 64-bit value from an atomic counter and return the result.
849 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
850 * and returns the value of v after the subtraction.
853 * A pointer to the atomic counter.
855 * The value to be subtracted from the counter.
857 * The value of v after the subtraction.
859 static inline int64_t
860 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
862 return rte_atomic64_add_return(v, -dec);
866 * Atomically increment a 64-bit counter by one and test.
868 * Atomically increments the atomic counter (v) by one and returns
869 * true if the result is 0, or false in all other cases.
872 * A pointer to the atomic counter.
874 * True if the result after the addition is 0; false otherwise.
876 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
884 : [cnt] "+m" (v->cnt), /* output */
892 * Atomically decrement a 64-bit counter by one and test.
894 * Atomically decrements the atomic counter (v) by one and returns true if
895 * the result is 0, or false in all other cases.
898 * A pointer to the atomic counter.
900 * True if the result after subtraction is 0; false otherwise.
902 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
910 : [cnt] "+m" (v->cnt), /* output */
917 * Atomically test and set a 64-bit atomic counter.
919 * If the counter value is already set, return 0 (failed). Otherwise, set
920 * the counter value to 1 and return 1 (success).
923 * A pointer to the atomic counter.
925 * 0 if failed; else 1, success.
927 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
929 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
933 * Atomically set a 64-bit counter to 0.
936 * A pointer to the atomic counter.
938 static inline void rte_atomic64_clear(rte_atomic64_t *v)
943 #endif /* _RTE_X86_64_ATOMIC_H_ */