4 * Copyright(c) 2010-2012 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 * version: DPDK.L.1.2.3-3
37 * Inspired from FreeBSD src/sys/i386/include/atomic.h
38 * Copyright (c) 1998 Doug Rabson
39 * All rights reserved.
42 #ifndef _RTE_ATOMIC_H_
43 #error "don't include this file directly, please include generic <rte_atomic.h>"
/*
 * NOTE(review): this chunk is a lossy extraction -- the leading numbers are
 * the original line numbers and several lines (e.g. the matching #endif and
 * #else directives) are elided.  Comments below describe only what the
 * visible lines establish.
 */
46 #ifndef _RTE_I686_ATOMIC_H_
47 #define _RTE_I686_ATOMIC_H_
51 * Atomic Operations on i686
/*
 * MPLOCKED is prepended to the read-modify-write asm templates below: on a
 * single-lcore build it expands to nothing (no other core can race), while
 * the multi-core build uses the "lock" prefix to make the instruction
 * atomic across cores.
 */
54 #if RTE_MAX_LCORE == 1
55 #define MPLOCKED /**< No need to insert MP lock prefix. */
/* #else branch (original line 56) elided in this extraction. */
57 #define MPLOCKED "lock ; " /**< Insert MP lock prefix. */
/*
 * All three barriers below expand to the same template: a (possibly
 * lock-prefixed) add of zero to the word at the top of the stack, with a
 * "memory" clobber so the compiler cannot reorder memory accesses across
 * the barrier.  The same instruction is used for the read and write
 * variants as well -- conservative but correct on IA-32.
 */
61 * General memory barrier.
63 * Guarantees that the LOAD and STORE operations generated before the
64 * barrier occur before the LOAD and STORE operations generated after.
66 #define rte_mb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")
69 * Write memory barrier.
71 * Guarantees that the STORE operations generated before the barrier
72 * occur before the STORE operations generated after.
74 #define rte_wmb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")
77 * Read memory barrier.
79 * Guarantees that the LOAD operations generated before the barrier
80 * occur before the LOAD operations generated after.
82 #define rte_rmb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")
84 /*------------------------- 16 bit atomic operations -------------------------*/
/*
 * NOTE(review): lossy extraction -- the leading numbers are original line
 * numbers; braces, local declarations and some asm lines of the functions
 * below are elided.  Comments describe only what the visible lines show.
 */
87 * Atomic compare and set.
89 * (atomic) equivalent to:
91 * *dst = src (all 16-bit words)
94 * The destination location into which the value will be written.
100 * Non-zero on success; 0 on failure.
/*
 * Lock-prefixed cmpxchgw: compares *dst with exp (implicitly in ax) and,
 * on match, stores src into *dst.  The boolean result lands in al via the
 * "=a" (res) output; the sete instruction that materializes it is on an
 * elided line -- TODO confirm against the full file.
 */
103 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
109 "cmpxchgw %[src], %[dst];"
111 : [res] "=a" (res), /* output */
113 : [src] "r" (src), /* input */
116 : "memory"); /* clobber list */
121 * The atomic counter structure.
124 volatile int16_t cnt; /**< An internal counter value. */
128 * Static initializer for an atomic counter.
130 #define RTE_ATOMIC16_INIT(val) { (val) }
133 * Initialize an atomic counter.
136 * A pointer to the atomic counter.
/* Body elided in this extraction; presumably zeroes v->cnt -- confirm. */
139 rte_atomic16_init(rte_atomic16_t *v)
145 * Atomically read a 16-bit value from a counter.
148 * A pointer to the atomic counter.
150 * The value of the counter.
/* Body elided; presumably a plain volatile load of v->cnt -- confirm. */
152 static inline int16_t
153 rte_atomic16_read(const rte_atomic16_t *v)
159 * Atomically set a counter to a 16-bit value.
162 * A pointer to the atomic counter.
164 * The new value for the counter.
/* Body elided; presumably a plain volatile store to v->cnt -- confirm. */
167 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
173 * Atomically add a 16-bit value to an atomic counter.
176 * A pointer to the atomic counter.
178 * The value to be added to the counter.
/* Locked in-place addw on the counter in memory; no return value. */
181 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
185 "addw %[inc], %[cnt]"
186 : [cnt] "=m" (v->cnt) /* output */
187 : [inc] "ir" (inc), /* input */
193 * Atomically subtract a 16-bit value from an atomic counter.
196 * A pointer to the atomic counter.
198 * The value to be subtracted from the counter.
/* Same pattern as rte_atomic16_add, using subw. */
201 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
205 "subw %[dec], %[cnt]"
206 : [cnt] "=m" (v->cnt) /* output */
207 : [dec] "ir" (dec), /* input */
213 * Atomically increment a counter by one.
216 * A pointer to the atomic counter.
/* Asm mnemonic elided (presumably incw); only constraints are visible. */
219 rte_atomic16_inc(rte_atomic16_t *v)
224 : [cnt] "=m" (v->cnt) /* output */
225 : "m" (v->cnt) /* input */
230 * Atomically decrement a counter by one.
233 * A pointer to the atomic counter.
/* Asm mnemonic elided (presumably decw); only constraints are visible. */
236 rte_atomic16_dec(rte_atomic16_t *v)
241 : [cnt] "=m" (v->cnt) /* output */
242 : "m" (v->cnt) /* input */
247 * Atomically add a 16-bit value to a counter and return the result.
249 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
250 * returns the value of v after addition.
253 * A pointer to the atomic counter.
255 * The value to be added to the counter.
257 * The value of v after the addition.
/*
 * xaddw swaps prev with the counter while storing the sum, so after the
 * instruction prev holds the counter's old value; prev + inc reconstructs
 * the post-addition value without a second memory access.
 */
259 static inline int16_t
260 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
266 "xaddw %[prev], %[cnt]"
267 : [prev] "+r" (prev), /* output */
269 : "m" (v->cnt) /* input */
271 return (int16_t)(prev + inc);
275 * Atomically subtract a 16-bit value from a counter and return
278 * Atomically subtracts the 16-bit value (inc) from the atomic counter
279 * (v) and returns the value of v after the subtraction.
282 * A pointer to the atomic counter.
284 * The value to be subtracted from the counter.
286 * The value of v after the subtraction.
/* Implemented as an add of the negated value. */
288 static inline int16_t
289 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
291 return rte_atomic16_add_return(v, (int16_t)-dec);
295 * Atomically increment a 16-bit counter by one and test.
297 * Atomically increments the atomic counter (v) by one and returns true if
298 * the result is 0, or false in all other cases.
301 * A pointer to the atomic counter.
303 * True if the result after the increment operation is 0; false otherwise.
/*
 * The visible "+m" constraint shows a locked read-modify-write on the
 * counter; the sete capturing ZF is on an elided line -- TODO confirm.
 */
305 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
313 : [cnt] "+m" (v->cnt), /* output */
320 * Atomically decrement a 16-bit counter by one and test.
322 * Atomically decrements the atomic counter (v) by one and returns true if
323 * the result is 0, or false in all other cases.
326 * A pointer to the atomic counter.
328 * True if the result after the decrement operation is 0; false otherwise.
330 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
334 asm volatile(MPLOCKED
337 : [cnt] "+m" (v->cnt), /* output */
344 * Atomically test and set a 16-bit atomic counter.
346 * If the counter value is already set, return 0 (failed). Otherwise, set
347 * the counter value to 1 and return 1 (success).
350 * A pointer to the atomic counter.
352 * 0 if failed; else 1, success.
/* 0 -> 1 transition via compare-and-set; succeeds only if cnt was 0. */
354 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
356 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
360 * Atomically set a 16-bit counter to 0.
363 * A pointer to the atomic counter.
/* Body elided; presumably stores 0 to v->cnt -- confirm. */
365 static inline void rte_atomic16_clear(rte_atomic16_t *v)
370 /*------------------------- 32 bit atomic operations -------------------------*/
/*
 * NOTE(review): lossy extraction -- the leading numbers are original line
 * numbers; braces, locals and some asm lines are elided.  This section
 * mirrors the 16-bit one with "l"-suffixed (32-bit) instructions.
 */
373 * Atomic compare and set.
375 * (atomic) equivalent to:
377 * *dst = src (all 32-bit words)
380 * The destination location into which the value will be written.
382 * The expected value.
386 * Non-zero on success; 0 on failure.
/*
 * Lock-prefixed cmpxchgl: compares *dst with exp (implicitly in eax) and,
 * on match, stores src.  Result in al via "=a" (res); the sete line is
 * elided -- TODO confirm against the full file.
 */
389 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
395 "cmpxchgl %[src], %[dst];"
397 : [res] "=a" (res), /* output */
399 : [src] "r" (src), /* input */
402 : "memory"); /* clobber list */
407 * The atomic counter structure.
410 volatile int32_t cnt; /**< An internal counter value. */
414 * Static initializer for an atomic counter.
416 #define RTE_ATOMIC32_INIT(val) { (val) }
419 * Initialize an atomic counter.
422 * A pointer to the atomic counter.
/* Body elided; presumably zeroes v->cnt -- confirm. */
425 rte_atomic32_init(rte_atomic32_t *v)
431 * Atomically read a 32-bit value from a counter.
434 * A pointer to the atomic counter.
436 * The value of the counter.
/* Body elided; presumably a plain volatile load of v->cnt -- confirm. */
438 static inline int32_t
439 rte_atomic32_read(const rte_atomic32_t *v)
445 * Atomically set a counter to a 32-bit value.
448 * A pointer to the atomic counter.
450 * The new value for the counter.
/* Body elided; presumably a plain volatile store to v->cnt -- confirm. */
453 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
459 * Atomically add a 32-bit value to an atomic counter.
462 * A pointer to the atomic counter.
464 * The value to be added to the counter.
/* Locked in-place addl on the counter in memory; no return value. */
467 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
471 "addl %[inc], %[cnt]"
472 : [cnt] "=m" (v->cnt) /* output */
473 : [inc] "ir" (inc), /* input */
479 * Atomically subtract a 32-bit value from an atomic counter.
482 * A pointer to the atomic counter.
484 * The value to be subtracted from the counter.
/* Same pattern as rte_atomic32_add, using subl. */
487 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
491 "subl %[dec], %[cnt]"
492 : [cnt] "=m" (v->cnt) /* output */
493 : [dec] "ir" (dec), /* input */
499 * Atomically increment a counter by one.
502 * A pointer to the atomic counter.
/* Asm mnemonic elided (presumably incl); only constraints are visible. */
505 rte_atomic32_inc(rte_atomic32_t *v)
510 : [cnt] "=m" (v->cnt) /* output */
511 : "m" (v->cnt) /* input */
516 * Atomically decrement a counter by one.
519 * A pointer to the atomic counter.
/* Asm mnemonic elided (presumably decl); only constraints are visible. */
522 rte_atomic32_dec(rte_atomic32_t *v)
527 : [cnt] "=m" (v->cnt) /* output */
528 : "m" (v->cnt) /* input */
533 * Atomically add a 32-bit value to a counter and return the result.
535 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
536 * returns the value of v after addition.
539 * A pointer to the atomic counter.
541 * The value to be added to the counter.
543 * The value of v after the addition.
/*
 * xaddl leaves the counter's old value in prev; prev + inc reconstructs
 * the post-addition value without a second memory access.
 */
545 static inline int32_t
546 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
552 "xaddl %[prev], %[cnt]"
553 : [prev] "+r" (prev), /* output */
555 : "m" (v->cnt) /* input */
557 return (int32_t)(prev + inc);
561 * Atomically subtract a 32-bit value from a counter and return
564 * Atomically subtracts the 32-bit value (inc) from the atomic counter
565 * (v) and returns the value of v after the subtraction.
568 * A pointer to the atomic counter.
570 * The value to be subtracted from the counter.
572 * The value of v after the subtraction.
/* Implemented as an add of the negated value. */
574 static inline int32_t
575 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
577 return rte_atomic32_add_return(v, -dec);
581 * Atomically increment a 32-bit counter by one and test.
583 * Atomically increments the atomic counter (v) by one and returns true if
584 * the result is 0, or false in all other cases.
587 * A pointer to the atomic counter.
589 * True if the result after the increment operation is 0; false otherwise.
/*
 * The visible "+m" constraint shows a locked read-modify-write on the
 * counter; the sete capturing ZF is on an elided line -- TODO confirm.
 */
591 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
599 : [cnt] "+m" (v->cnt), /* output */
606 * Atomically decrement a 32-bit counter by one and test.
608 * Atomically decrements the atomic counter (v) by one and returns true if
609 * the result is 0, or false in all other cases.
612 * A pointer to the atomic counter.
614 * True if the result after the decrement operation is 0; false otherwise.
616 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
620 asm volatile(MPLOCKED
623 : [cnt] "+m" (v->cnt), /* output */
630 * Atomically test and set a 32-bit atomic counter.
632 * If the counter value is already set, return 0 (failed). Otherwise, set
633 * the counter value to 1 and return 1 (success).
636 * A pointer to the atomic counter.
638 * 0 if failed; else 1, success.
/* 0 -> 1 transition via compare-and-set; succeeds only if cnt was 0. */
640 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
642 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
646 * Atomically set a 32-bit counter to 0.
649 * A pointer to the atomic counter.
/* Body elided; presumably stores 0 to v->cnt -- confirm. */
651 static inline void rte_atomic32_clear(rte_atomic32_t *v)
656 /*------------------------- 64 bit atomic operations -------------------------*/
/*
 * NOTE(review): lossy extraction -- the leading numbers are original line
 * numbers; braces, locals (e.g. the _src/_exp low/high-word unions used by
 * cmpset) and retry-loop bodies are elided.  i686 has no 64-bit locked
 * arithmetic, so everything below is built on cmpxchg8b retry loops.
 */
659 * An atomic compare and set function used by the mutex functions.
660 * (atomic) equivalent to:
662 * *dst = src (all 64-bit words)
665 * The destination into which the value will be written.
667 * The expected value.
671 * Non-zero on success; 0 on failure.
/*
 * cmpxchg8b requires its operands in fixed registers: expected value in
 * edx:eax, replacement in ecx:ebx (visible in the constraints below).
 * On match it stores ecx:ebx to (dst) and sets ZF; the sete that turns
 * ZF into res is on an elided line -- TODO confirm.
 */
674 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
690 "cmpxchg8b (%[dst]);"
692 : [res] "=a" (res) /* result in eax */
693 : [dst] "S" (dst), /* esi */
694 "b" (_src.l32), /* ebx */
695 "c" (_src.h32), /* ecx */
696 "a" (_exp.l32), /* eax */
697 "d" (_exp.h32) /* edx */
698 : "memory" ); /* clobber list */
704 * The atomic counter structure.
707 volatile int64_t cnt; /**< Internal counter value. */
711 * Static initializer for an atomic counter.
713 #define RTE_ATOMIC64_INIT(val) { (val) }
716 * Initialize the atomic counter.
719 * A pointer to the atomic counter.
/* Retries cmpset until it wins the race; the new value is elided here. */
722 rte_atomic64_init(rte_atomic64_t *v)
727 while (success == 0) {
729 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
735 * Atomically read a 64-bit counter.
738 * A pointer to the atomic counter.
740 * The value of the counter.
/*
 * A plain 64-bit load is not atomic on i686, so the read is done with a
 * cmpset that swaps the current value for itself.
 */
742 static inline int64_t
743 rte_atomic64_read(rte_atomic64_t *v)
748 while (success == 0) {
750 /* replace the value by itself */
751 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
758 * Atomically set a 64-bit counter.
761 * A pointer to the atomic counter.
763 * The new value of the counter.
/* cmpset retry loop: re-reads and retries until the swap succeeds. */
766 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
771 while (success == 0) {
773 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
779 * Atomically add a 64-bit value to a counter.
782 * A pointer to the atomic counter.
784 * The value to be added to the counter.
/* cmpset retry loop: read, compute sum, swap; retry on contention. */
787 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
792 while (success == 0) {
794 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
800 * Atomically subtract a 64-bit value from a counter.
803 * A pointer to the atomic counter.
805 * The value to be subtracted from the counter.
/* cmpset retry loop, mirroring rte_atomic64_add. */
808 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
813 while (success == 0) {
815 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
821 * Atomically increment a 64-bit counter by one.
824 * A pointer to the atomic counter.
/* Thin wrapper; result is discarded (no "test" despite older wording). */
827 rte_atomic64_inc(rte_atomic64_t *v)
829 rte_atomic64_add(v, 1);
833 * Atomically decrement a 64-bit counter by one.
836 * A pointer to the atomic counter.
/* Thin wrapper; result is discarded (no "test" despite older wording). */
839 rte_atomic64_dec(rte_atomic64_t *v)
841 rte_atomic64_sub(v, 1);
845 * Add a 64-bit value to an atomic counter and return the result.
847 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
848 * returns the value of v after the addition.
851 * A pointer to the atomic counter.
853 * The value to be added to the counter.
855 * The value of v after the addition.
/* cmpset retry loop; the return of the post-add value is elided here. */
857 static inline int64_t
858 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
863 while (success == 0) {
865 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
873 * Subtract a 64-bit value from an atomic counter and return the result.
875 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
876 * and returns the value of v after the subtraction.
879 * A pointer to the atomic counter.
881 * The value to be subtracted from the counter.
883 * The value of v after the subtraction.
/* cmpset retry loop, mirroring rte_atomic64_add_return. */
885 static inline int64_t
886 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
891 while (success == 0) {
893 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
901 * Atomically increment a 64-bit counter by one and test.
903 * Atomically increments the atomic counter (v) by one and returns
904 * true if the result is 0, or false in all other cases.
907 * A pointer to the atomic counter.
909 * True if the result after the addition is 0; false otherwise.
911 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
913 return rte_atomic64_add_return(v, 1) == 0;
917 * Atomically decrement a 64-bit counter by one and test.
919 * Atomically decrements the atomic counter (v) by one and returns true if
920 * the result is 0, or false in all other cases.
923 * A pointer to the atomic counter.
925 * True if the result after subtraction is 0; false otherwise.
927 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
929 return rte_atomic64_sub_return(v, 1) == 0;
933 * Atomically test and set a 64-bit atomic counter.
935 * If the counter value is already set, return 0 (failed). Otherwise, set
936 * the counter value to 1 and return 1 (success).
939 * A pointer to the atomic counter.
941 * 0 if failed; else 1, success.
/* 0 -> 1 transition via compare-and-set; succeeds only if cnt was 0. */
943 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
945 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
949 * Atomically set a 64-bit counter to 0.
952 * A pointer to the atomic counter.
/* Delegates to the cmpset-based setter with value 0. */
954 static inline void rte_atomic64_clear(rte_atomic64_t *v)
956 rte_atomic64_set(v, 0);
959 #endif /* _RTE_I686_ATOMIC_H_ */