4 * Copyright(c) 2010-2013 Intel Corporation. All rights reserved.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * * Neither the name of Intel Corporation nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 * Inspired from FreeBSD src/sys/i386/include/atomic.h
37 * Copyright (c) 1998 Doug Rabson
38 * All rights reserved.
41 #ifndef _RTE_ATOMIC_H_
42 #error "don't include this file directly, please include generic <rte_atomic.h>"
45 #ifndef _RTE_I686_ATOMIC_H_
46 #define _RTE_I686_ATOMIC_H_
/**
 * @file
 * Atomic Operations on i686
 */

/*
 * MPLOCKED expands to the x86 "lock" instruction prefix so that the
 * read-modify-write instructions below are atomic across cores. On a
 * single-lcore build the prefix is unnecessary and omitted.
 */
#if RTE_MAX_LCORE == 1
#define MPLOCKED                        /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
#endif
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
#define rte_mb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
#define rte_wmb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
#define rte_rmb() asm volatile(MPLOCKED "addl $0,(%%esp)" : : : "memory")
83 /*------------------------- 16 bit atomic operations -------------------------*/
86 * Atomic compare and set.
88 * (atomic) equivalent to:
90 * *dst = src (all 16-bit words)
93 * The destination location into which the value will be written.
99 * Non-zero on success; 0 on failure.
102 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
108 "cmpxchgw %[src], %[dst];"
110 : [res] "=a" (res), /* output */
112 : [src] "r" (src), /* input */
115 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}
144 * Atomically read a 16-bit value from a counter.
147 * A pointer to the atomic counter.
149 * The value of the counter.
151 static inline int16_t
152 rte_atomic16_read(const rte_atomic16_t *v)
158 * Atomically set a counter to a 16-bit value.
161 * A pointer to the atomic counter.
163 * The new value for the counter.
166 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
172 * Atomically add a 16-bit value to an atomic counter.
175 * A pointer to the atomic counter.
177 * The value to be added to the counter.
180 rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
184 "addw %[inc], %[cnt]"
185 : [cnt] "=m" (v->cnt) /* output */
186 : [inc] "ir" (inc), /* input */
192 * Atomically subtract a 16-bit value from an atomic counter.
195 * A pointer to the atomic counter.
197 * The value to be subtracted from the counter.
200 rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
204 "subw %[dec], %[cnt]"
205 : [cnt] "=m" (v->cnt) /* output */
206 : [dec] "ir" (dec), /* input */
212 * Atomically increment a counter by one.
215 * A pointer to the atomic counter.
218 rte_atomic16_inc(rte_atomic16_t *v)
223 : [cnt] "=m" (v->cnt) /* output */
224 : "m" (v->cnt) /* input */
229 * Atomically decrement a counter by one.
232 * A pointer to the atomic counter.
235 rte_atomic16_dec(rte_atomic16_t *v)
240 : [cnt] "=m" (v->cnt) /* output */
241 : "m" (v->cnt) /* input */
246 * Atomically add a 16-bit value to a counter and return the result.
248 * Atomically adds the 16-bits value (inc) to the atomic counter (v) and
249 * returns the value of v after addition.
252 * A pointer to the atomic counter.
254 * The value to be added to the counter.
256 * The value of v after the addition.
258 static inline int16_t
259 rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
265 "xaddw %[prev], %[cnt]"
266 : [prev] "+r" (prev), /* output */
268 : "m" (v->cnt) /* input */
270 return (int16_t)(prev + inc);
274 * Atomically subtract a 16-bit value from a counter and return
277 * Atomically subtracts the 16-bit value (inc) from the atomic counter
278 * (v) and returns the value of v after the subtraction.
281 * A pointer to the atomic counter.
283 * The value to be subtracted from the counter.
285 * The value of v after the subtraction.
287 static inline int16_t
288 rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
290 return rte_atomic16_add_return(v, (int16_t)-dec);
294 * Atomically increment a 16-bit counter by one and test.
296 * Atomically increments the atomic counter (v) by one and returns true if
297 * the result is 0, or false in all other cases.
300 * A pointer to the atomic counter.
302 * True if the result after the increment operation is 0; false otherwise.
304 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
312 : [cnt] "+m" (v->cnt), /* output */
319 * Atomically decrement a 16-bit counter by one and test.
321 * Atomically decrements the atomic counter (v) by one and returns true if
322 * the result is 0, or false in all other cases.
325 * A pointer to the atomic counter.
327 * True if the result after the decrement operation is 0; false otherwise.
329 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
333 asm volatile(MPLOCKED
336 : [cnt] "+m" (v->cnt), /* output */
343 * Atomically test and set a 16-bit atomic counter.
345 * If the counter value is already set, return 0 (failed). Otherwise, set
346 * the counter value to 1 and return 1 (success).
349 * A pointer to the atomic counter.
351 * 0 if failed; else 1, success.
353 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
355 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
359 * Atomically set a 16-bit counter to 0.
362 * A pointer to the atomic counter.
364 static inline void rte_atomic16_clear(rte_atomic16_t *v)
369 /*------------------------- 32 bit atomic operations -------------------------*/
372 * Atomic compare and set.
374 * (atomic) equivalent to:
376 * *dst = src (all 32-bit words)
379 * The destination location into which the value will be written.
381 * The expected value.
385 * Non-zero on success; 0 on failure.
388 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
394 "cmpxchgl %[src], %[dst];"
396 : [res] "=a" (res), /* output */
398 : [src] "r" (src), /* input */
401 : "memory"); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}
430 * Atomically read a 32-bit value from a counter.
433 * A pointer to the atomic counter.
435 * The value of the counter.
437 static inline int32_t
438 rte_atomic32_read(const rte_atomic32_t *v)
444 * Atomically set a counter to a 32-bit value.
447 * A pointer to the atomic counter.
449 * The new value for the counter.
452 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
458 * Atomically add a 32-bit value to an atomic counter.
461 * A pointer to the atomic counter.
463 * The value to be added to the counter.
466 rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
470 "addl %[inc], %[cnt]"
471 : [cnt] "=m" (v->cnt) /* output */
472 : [inc] "ir" (inc), /* input */
478 * Atomically subtract a 32-bit value from an atomic counter.
481 * A pointer to the atomic counter.
483 * The value to be subtracted from the counter.
486 rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
490 "subl %[dec], %[cnt]"
491 : [cnt] "=m" (v->cnt) /* output */
492 : [dec] "ir" (dec), /* input */
498 * Atomically increment a counter by one.
501 * A pointer to the atomic counter.
504 rte_atomic32_inc(rte_atomic32_t *v)
509 : [cnt] "=m" (v->cnt) /* output */
510 : "m" (v->cnt) /* input */
515 * Atomically decrement a counter by one.
518 * A pointer to the atomic counter.
521 rte_atomic32_dec(rte_atomic32_t *v)
526 : [cnt] "=m" (v->cnt) /* output */
527 : "m" (v->cnt) /* input */
532 * Atomically add a 32-bit value to a counter and return the result.
534 * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
535 * returns the value of v after addition.
538 * A pointer to the atomic counter.
540 * The value to be added to the counter.
542 * The value of v after the addition.
544 static inline int32_t
545 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
551 "xaddl %[prev], %[cnt]"
552 : [prev] "+r" (prev), /* output */
554 : "m" (v->cnt) /* input */
556 return (int32_t)(prev + inc);
560 * Atomically subtract a 32-bit value from a counter and return
563 * Atomically subtracts the 32-bit value (inc) from the atomic counter
564 * (v) and returns the value of v after the subtraction.
567 * A pointer to the atomic counter.
569 * The value to be subtracted from the counter.
571 * The value of v after the subtraction.
573 static inline int32_t
574 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
576 return rte_atomic32_add_return(v, -dec);
580 * Atomically increment a 32-bit counter by one and test.
582 * Atomically increments the atomic counter (v) by one and returns true if
583 * the result is 0, or false in all other cases.
586 * A pointer to the atomic counter.
588 * True if the result after the increment operation is 0; false otherwise.
590 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
598 : [cnt] "+m" (v->cnt), /* output */
605 * Atomically decrement a 32-bit counter by one and test.
607 * Atomically decrements the atomic counter (v) by one and returns true if
608 * the result is 0, or false in all other cases.
611 * A pointer to the atomic counter.
613 * True if the result after the decrement operation is 0; false otherwise.
615 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
619 asm volatile(MPLOCKED
622 : [cnt] "+m" (v->cnt), /* output */
629 * Atomically test and set a 32-bit atomic counter.
631 * If the counter value is already set, return 0 (failed). Otherwise, set
632 * the counter value to 1 and return 1 (success).
635 * A pointer to the atomic counter.
637 * 0 if failed; else 1, success.
639 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
641 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
645 * Atomically set a 32-bit counter to 0.
648 * A pointer to the atomic counter.
650 static inline void rte_atomic32_clear(rte_atomic32_t *v)
655 /*------------------------- 64 bit atomic operations -------------------------*/
658 * An atomic compare and set function used by the mutex functions.
659 * (atomic) equivalent to:
661 * *dst = src (all 64-bit words)
664 * The destination into which the value will be written.
666 * The expected value.
670 * Non-zero on success; 0 on failure.
673 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
689 "cmpxchg8b (%[dst]);"
691 : [res] "=a" (res) /* result in eax */
692 : [dst] "S" (dst), /* esi */
693 "b" (_src.l32), /* ebx */
694 "c" (_src.h32), /* ecx */
695 "a" (_exp.l32), /* eax */
696 "d" (_exp.h32) /* edx */
697 : "memory" ); /* no-clobber list */
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * i686 has no atomic 64-bit store, so the counter is zeroed through a
 * cmpxchg8b compare-and-set loop.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
}
734 * Atomically read a 64-bit counter.
737 * A pointer to the atomic counter.
739 * The value of the counter.
741 static inline int64_t
742 rte_atomic64_read(rte_atomic64_t *v)
747 while (success == 0) {
749 /* replace the value by itself */
750 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
757 * Atomically set a 64-bit counter.
760 * A pointer to the atomic counter.
762 * The new value of the counter.
765 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
770 while (success == 0) {
772 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
778 * Atomically add a 64-bit value to a counter.
781 * A pointer to the atomic counter.
783 * The value to be added to the counter.
786 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
791 while (success == 0) {
793 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
799 * Atomically subtract a 64-bit value from a counter.
802 * A pointer to the atomic counter.
804 * The value to be substracted from the counter.
807 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
812 while (success == 0) {
814 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
820 * Atomically increment a 64-bit counter by one and test.
823 * A pointer to the atomic counter.
826 rte_atomic64_inc(rte_atomic64_t *v)
828 rte_atomic64_add(v, 1);
832 * Atomically decrement a 64-bit counter by one and test.
835 * A pointer to the atomic counter.
838 rte_atomic64_dec(rte_atomic64_t *v)
840 rte_atomic64_sub(v, 1);
844 * Add a 64-bit value to an atomic counter and return the result.
846 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
847 * returns the value of v after the addition.
850 * A pointer to the atomic counter.
852 * The value to be added to the counter.
854 * The value of v after the addition.
856 static inline int64_t
857 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
862 while (success == 0) {
864 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
872 * Subtract a 64-bit value from an atomic counter and return the result.
874 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
875 * and returns the value of v after the substraction.
878 * A pointer to the atomic counter.
880 * The value to be substracted from the counter.
882 * The value of v after the substraction.
884 static inline int64_t
885 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
890 while (success == 0) {
892 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
900 * Atomically increment a 64-bit counter by one and test.
902 * Atomically increments the atomic counter (v) by one and returns
903 * true if the result is 0, or false in all other cases.
906 * A pointer to the atomic counter.
908 * True if the result after the addition is 0; false otherwise.
910 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
912 return rte_atomic64_add_return(v, 1) == 0;
916 * Atomically decrement a 64-bit counter by one and test.
918 * Atomically decrements the atomic counter (v) by one and returns true if
919 * the result is 0, or false in all other cases.
922 * A pointer to the atomic counter.
924 * True if the result after substraction is 0; false otherwise.
926 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
928 return rte_atomic64_sub_return(v, 1) == 0;
932 * Atomically test and set a 64-bit atomic counter.
934 * If the counter value is already set, return 0 (failed). Otherwise, set
935 * the counter value to 1 and return 1 (success).
938 * A pointer to the atomic counter.
940 * 0 if failed; else 1, success.
942 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
944 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
948 * Atomically set a 64-bit counter to 0.
951 * A pointer to the atomic counter.
953 static inline void rte_atomic64_clear(rte_atomic64_t *v)
955 rte_atomic64_set(v, 0);
958 #endif /* _RTE_I686_ATOMIC_H_ */