From 4b2fd987d8bf065487873837bda9b30ab914fbb7 Mon Sep 17 00:00:00 2001
From: David Marchand
Date: Tue, 28 Oct 2014 13:50:48 +0100
Subject: [PATCH] eal: split atomic operations to architecture specific

This patch first adds architecture specific directories to eal, then
splits the atomic operations into architecture specific and generic files.
Architecture specific files are put into the corresponding architecture
directory, and the common headers are put into the generic directory.

Update the documentation generation with the new generic/ directory.

Signed-off-by: Chao Zhu
Signed-off-by: David Marchand
Acked-by: Thomas Monjalon
---
 doc/api/doxy-api.conf                              |   1 +
 lib/librte_eal/common/Makefile                     |  11 +-
 .../{i686/arch => arch/i686}/rte_atomic.h          | 322 ++++++------
 .../{x86_64/arch => arch/x86_64}/rte_atomic.h      | 321 ++++++------
 .../common/include/{ => generic}/rte_atomic.h      | 467 +++++-------
 5 files changed, 479 insertions(+), 643 deletions(-)
 rename lib/librte_eal/common/include/{i686/arch => arch/i686}/rte_atomic.h (60%)
 rename lib/librte_eal/common/include/{x86_64/arch => arch/x86_64}/rte_atomic.h (55%)
 rename lib/librte_eal/common/include/{ => generic}/rte_atomic.h (74%)

diff --git a/doc/api/doxy-api.conf b/doc/api/doxy-api.conf
index fe3879fa0c..27c782cd6c 100644
--- a/doc/api/doxy-api.conf
+++ b/doc/api/doxy-api.conf
@@ -31,6 +31,7 @@
 PROJECT_NAME            = DPDK
 INPUT                   = doc/api/doxy-api-index.md \
                           lib/librte_eal/common/include \
+                          lib/librte_eal/common/include/generic \
                           lib/librte_acl \
                           lib/librte_distributor \
                           lib/librte_ether \
diff --git a/lib/librte_eal/common/Makefile b/lib/librte_eal/common/Makefile
index 7f279666fb..8ab363b9fe 100644
--- a/lib/librte_eal/common/Makefile
+++ b/lib/librte_eal/common/Makefile
@@ -31,7 +31,7 @@
 
 include $(RTE_SDK)/mk/rte.vars.mk
 
-INC := rte_atomic.h rte_branch_prediction.h rte_byteorder.h rte_common.h
+INC := rte_branch_prediction.h rte_byteorder.h rte_common.h
 INC += rte_cycles.h rte_debug.h rte_eal.h rte_errno.h rte_launch.h rte_lcore.h
 INC += rte_log.h rte_memcpy.h rte_memory.h rte_memzone.h rte_pci.h
 INC += rte_pci_dev_ids.h rte_per_lcore.h rte_prefetch.h rte_random.h
@@ -46,11 +46,14 @@ ifeq ($(CONFIG_RTE_INSECURE_FUNCTION_WARNING),y)
 INC += rte_warnings.h
 endif
 
-ARCH_INC := rte_atomic.h
+GENERIC_INC := rte_atomic.h
+ARCH_INC := $(GENERIC_INC)
 
 SYMLINK-$(CONFIG_RTE_LIBRTE_EAL)-include := $(addprefix include/,$(INC))
-SYMLINK-$(CONFIG_RTE_LIBRTE_EAL)-include/arch := \
-	$(addprefix include/$(RTE_ARCH)/arch/,$(ARCH_INC))
+SYMLINK-$(CONFIG_RTE_LIBRTE_EAL)-include += \
+	$(addprefix include/arch/$(RTE_ARCH)/,$(ARCH_INC))
+SYMLINK-$(CONFIG_RTE_LIBRTE_EAL)-include/generic := \
+	$(addprefix include/generic/,$(GENERIC_INC))
 
 # add libc if configured
 DEPDIRS-$(CONFIG_RTE_LIBC) += lib/libc
diff --git a/lib/librte_eal/common/include/i686/arch/rte_atomic.h b/lib/librte_eal/common/include/arch/i686/rte_atomic.h
similarity index 60%
rename from lib/librte_eal/common/include/i686/arch/rte_atomic.h
rename to lib/librte_eal/common/include/arch/i686/rte_atomic.h
index 6956b87042..8330250a09 100644
--- a/lib/librte_eal/common/include/i686/arch/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/i686/rte_atomic.h
@@ -37,37 +37,179 @@
  * All rights reserved.
  */
 
-#ifndef _RTE_ATOMIC_H_
-#error "don't include this file directly, please include generic <rte_atomic.h>"
+#ifndef _RTE_ATOMIC_I686_H_
+#define _RTE_ATOMIC_I686_H_
+
+#ifdef __cplusplus
+extern "C" {
 #endif
 
-#ifndef _RTE_I686_ATOMIC_H_
-#define _RTE_I686_ATOMIC_H_
+#include <emmintrin.h>
+#include "generic/rte_atomic.h"
 
+#if RTE_MAX_LCORE == 1
+#define MPLOCKED                        /**< No need to insert MP lock prefix. */
+#else
+#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
+#endif
 
-/**
- * @file
- * Atomic Operations on i686
- */
+#define rte_mb() _mm_mfence()
+
+#define rte_wmb() _mm_sfence()
+
+#define rte_rmb() _mm_lfence()
+
+/*------------------------- 16 bit atomic operations -------------------------*/
+
+#ifndef RTE_FORCE_INTRINSICS
+static inline int
+rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
+{
+	uint8_t res;
+
+	asm volatile(
+			MPLOCKED
+			"cmpxchgw %[src], %[dst];"
+			"sete %[res];"
+			: [res] "=a" (res),     /* output */
+			  [dst] "=m" (*dst)
+			: [src] "r" (src),      /* input */
+			  "a" (exp),
+			  "m" (*dst)
+			: "memory");            /* no-clobber list */
+	return res;
+}
+
+static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
+{
+	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
+}
+
+static inline void
+rte_atomic16_inc(rte_atomic16_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"incw %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline void
+rte_atomic16_dec(rte_atomic16_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"decw %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(
+			MPLOCKED
+			"incw %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(MPLOCKED
+			"decw %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+/*------------------------- 32 bit atomic operations -------------------------*/
+
+static inline int
+rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
+{
+	uint8_t res;
+
+	asm volatile(
+			MPLOCKED
+			"cmpxchgl %[src], %[dst];"
+			"sete %[res];"
+			: [res] "=a" (res),     /* output */
+			  [dst] "=m" (*dst)
+			: [src] "r" (src),      /* input */
+			  "a" (exp),
+			  "m" (*dst)
+			: "memory");            /* no-clobber list */
+	return res;
+}
+
+static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
+{
+	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
+}
+
+static inline void
+rte_atomic32_inc(rte_atomic32_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"incl %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+static inline void
+rte_atomic32_dec(rte_atomic32_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"decl %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(
+			MPLOCKED
+			"incl %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(MPLOCKED
+			"decl %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
 
 /*------------------------- 64 bit atomic operations -------------------------*/
 
-/**
- * An atomic compare and set function used by the mutex functions.
- * (atomic) equivalent to:
- *   if (*dst == exp)
- *     *dst = src (all 64-bit words)
- *
- * @param dst
- *   The destination into which the value will be written.
- * @param exp
- *   The expected value.
- * @param src
- *   The new value.
- * @return
- *   Non-zero on success; 0 on failure.
- */
 static inline int
 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 {
@@ -114,24 +256,6 @@ rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 	return res;
 }
 
-/**
- * The atomic counter structure.
- */
-typedef struct {
-	volatile int64_t cnt;  /**< Internal counter value. */
-} rte_atomic64_t;
-
-/**
- * Static initializer for an atomic counter.
- */
-#define RTE_ATOMIC64_INIT(val) { (val) }
-
-/**
- * Initialize the atomic counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_init(rte_atomic64_t *v)
 {
@@ -145,14 +269,6 @@ rte_atomic64_init(rte_atomic64_t *v)
 	}
 }
 
-/**
- * Atomically read a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   The value of the counter.
- */
 static inline int64_t
 rte_atomic64_read(rte_atomic64_t *v)
 {
@@ -168,14 +284,6 @@ rte_atomic64_read(rte_atomic64_t *v)
 	return tmp;
 }
 
-/**
- * Atomically set a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param new_value
- *   The new value of the counter.
- */
 static inline void
 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
 {
@@ -189,14 +297,6 @@ rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
 	}
 }
 
-/**
- * Atomically add a 64-bit value to a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- */
 static inline void
 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
 {
@@ -210,14 +310,6 @@ rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
 	}
 }
 
-/**
- * Atomically subtract a 64-bit value from a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- */
 static inline void
 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
 {
@@ -231,43 +323,18 @@ rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
 	}
 }
 
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_inc(rte_atomic64_t *v)
 {
 	rte_atomic64_add(v, 1);
 }
 
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_dec(rte_atomic64_t *v)
 {
 	rte_atomic64_sub(v, 1);
 }
 
-/**
- * Add a 64-bit value to an atomic counter and return the result.
- *
- * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
- * returns the value of v after the addition.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- * @return
- *   The value of v after the addition.
- */
 static inline int64_t
 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
 {
@@ -283,19 +350,6 @@ rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
 	return tmp + inc;
 }
 
-/**
- * Subtract a 64-bit value from an atomic counter and return the result.
- *
- * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
- * and returns the value of v after the subtraction.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- * @return
- *   The value of v after the subtraction.
- */
 static inline int64_t
 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
 {
@@ -311,63 +365,29 @@ rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
 	return tmp - dec;
 }
 
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * Atomically increments the atomic counter (v) by one and returns
- * true if the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after the addition is 0; false otherwise.
- */
 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
 {
 	return rte_atomic64_add_return(v, 1) == 0;
 }
 
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * Atomically decrements the atomic counter (v) by one and returns true if
- * the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after subtraction is 0; false otherwise.
- */
 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
 {
 	return rte_atomic64_sub_return(v, 1) == 0;
 }
 
-/**
- * Atomically test and set a 64-bit atomic counter.
- *
- * If the counter value is already set, return 0 (failed). Otherwise, set
- * the counter value to 1 and return 1 (success).
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   0 if failed; else 1, success.
- */
 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
 {
 	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
 }
 
-/**
- * Atomically set a 64-bit counter to 0.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void rte_atomic64_clear(rte_atomic64_t *v)
 {
 	rte_atomic64_set(v, 0);
 }
+#endif
+
+#ifdef __cplusplus
+}
+#endif
 
-#endif /* _RTE_I686_ATOMIC_H_ */
+#endif /* _RTE_ATOMIC_I686_H_ */
diff --git a/lib/librte_eal/common/include/x86_64/arch/rte_atomic.h b/lib/librte_eal/common/include/arch/x86_64/rte_atomic.h
similarity index 55%
rename from lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
rename to lib/librte_eal/common/include/arch/x86_64/rte_atomic.h
index 3ba7d3a505..9138328db6 100644
--- a/lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/x86_64/rte_atomic.h
@@ -37,36 +37,185 @@
  * All rights reserved.
  */
 
-#ifndef _RTE_ATOMIC_H_
-#error "don't include this file directly, please include generic <rte_atomic.h>"
+#ifndef _RTE_ATOMIC_X86_64_H_
+#define _RTE_ATOMIC_X86_64_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <emmintrin.h>
+#include "generic/rte_atomic.h"
+
+#if RTE_MAX_LCORE == 1
+#define MPLOCKED                        /**< No need to insert MP lock prefix. */
+#else
+#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
 #endif
 
-#ifndef _RTE_X86_64_ATOMIC_H_
-#define _RTE_X86_64_ATOMIC_H_
+#define rte_mb() _mm_mfence()
+
+#define rte_wmb() _mm_sfence()
+
+#define rte_rmb() _mm_lfence()
+
+/*------------------------- 16 bit atomic operations -------------------------*/
+
+#ifndef RTE_FORCE_INTRINSICS
+static inline int
+rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
+{
+	uint8_t res;
+
+	asm volatile(
+			MPLOCKED
+			"cmpxchgw %[src], %[dst];"
+			"sete %[res];"
+			: [res] "=a" (res),     /* output */
+			  [dst] "=m" (*dst)
+			: [src] "r" (src),      /* input */
+			  "a" (exp),
+			  "m" (*dst)
+			: "memory");            /* no-clobber list */
+	return res;
+}
+
+static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
+{
+	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
+}
+
+static inline void
+rte_atomic16_inc(rte_atomic16_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"incw %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline void
+rte_atomic16_dec(rte_atomic16_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"decw %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
+{
+	uint8_t ret;
+	asm volatile(
+			MPLOCKED
+			"incw %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(MPLOCKED
+			"decw %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+/*------------------------- 32 bit atomic operations -------------------------*/
+
+static inline int
+rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
+{
+	uint8_t res;
+
+	asm volatile(
+			MPLOCKED
+			"cmpxchgl %[src], %[dst];"
+			"sete %[res];"
+			: [res] "=a" (res),     /* output */
+			  [dst] "=m" (*dst)
+			: [src] "r" (src),      /* input */
+			  "a" (exp),
+			  "m" (*dst)
+			: "memory");            /* no-clobber list */
+	return res;
+}
+
+static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
+{
+	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
+}
+
+static inline void
+rte_atomic32_inc(rte_atomic32_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"incl %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline void
+rte_atomic32_dec(rte_atomic32_t *v)
+{
+	asm volatile(
+			MPLOCKED
+			"decl %[cnt]"
+			: [cnt] "=m" (v->cnt)   /* output */
+			: "m" (v->cnt)          /* input */
+			);
+}
+
+static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(
+			MPLOCKED
+			"incl %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
+
+static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
+{
+	uint8_t ret;
+
+	asm volatile(MPLOCKED
+			"decl %[cnt] ; "
+			"sete %[ret]"
+			: [cnt] "+m" (v->cnt),  /* output */
+			  [ret] "=qm" (ret)
+			);
+	return (ret != 0);
+}
 
 /*------------------------- 64 bit atomic operations -------------------------*/
 
-/**
- * An atomic compare and set function used by the mutex functions.
- * (atomic) equivalent to:
- *   if (*dst == exp)
- *     *dst = src (all 64-bit words)
- *
- * @param dst
- *   The destination into which the value will be written.
- * @param exp
- *   The expected value.
- * @param src
- *   The new value.
- * @return
- *   Non-zero on success; 0 on failure.
- */
 static inline int
 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 {
 	uint8_t res;
+
 	asm volatile(
 			MPLOCKED
 			"cmpxchgq %[src], %[dst];"
@@ -81,66 +230,24 @@ rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 	return res;
 }
 
-/**
- * The atomic counter structure.
- */
-typedef struct {
-	volatile int64_t cnt;  /**< Internal counter value. */
-} rte_atomic64_t;
-
-/**
- * Static initializer for an atomic counter.
- */
-#define RTE_ATOMIC64_INIT(val) { (val) }
-
-/**
- * Initialize the atomic counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_init(rte_atomic64_t *v)
 {
 	v->cnt = 0;
 }
 
-/**
- * Atomically read a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   The value of the counter.
- */
 static inline int64_t
 rte_atomic64_read(rte_atomic64_t *v)
 {
 	return v->cnt;
 }
 
-/**
- * Atomically set a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param new_value
- *   The new value of the counter.
- */
 static inline void
 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
 {
 	v->cnt = new_value;
 }
 
-/**
- * Atomically add a 64-bit value to a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- */
 static inline void
 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
 {
@@ -153,14 +260,6 @@ rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
 	);
 }
 
-/**
- * Atomically subtract a 64-bit value from a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- */
 static inline void
 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
 {
@@ -173,12 +272,6 @@ rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
 	);
 }
 
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_inc(rte_atomic64_t *v)
 {
@@ -190,12 +283,6 @@ rte_atomic64_inc(rte_atomic64_t *v)
 	);
 }
 
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void
 rte_atomic64_dec(rte_atomic64_t *v)
 {
@@ -207,19 +294,6 @@ rte_atomic64_dec(rte_atomic64_t *v)
 	);
 }
 
-/**
- * Add a 64-bit value to an atomic counter and return the result.
- *
- * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
- * returns the value of v after the addition.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- * @return
- *   The value of v after the addition.
- */
 static inline int64_t
 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
 {
@@ -235,36 +309,12 @@ rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
 	return prev + inc;
 }
 
-/**
- * Subtract a 64-bit value from an atomic counter and return the result.
- *
- * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
- * and returns the value of v after the subtraction.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- * @return
- *   The value of v after the subtraction.
- */
 static inline int64_t
 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
 {
 	return rte_atomic64_add_return(v, -dec);
 }
 
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * Atomically increments the atomic counter (v) by one and returns
- * true if the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after the addition is 0; false otherwise.
- */
 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
 {
 	uint8_t ret;
@@ -280,17 +330,6 @@ static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
 	return ret != 0;
 }
 
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * Atomically decrements the atomic counter (v) by one and returns true if
- * the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after subtraction is 0; false otherwise.
- */
 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
 {
 	uint8_t ret;
@@ -305,31 +344,19 @@ static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
 	return ret != 0;
 }
 
-/**
- * Atomically test and set a 64-bit atomic counter.
- *
- * If the counter value is already set, return 0 (failed). Otherwise, set
- * the counter value to 1 and return 1 (success).
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   0 if failed; else 1, success.
- */
 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
 {
 	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
 }
 
-/**
- * Atomically set a 64-bit counter to 0.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
 static inline void rte_atomic64_clear(rte_atomic64_t *v)
 {
 	v->cnt = 0;
 }
+#endif
+
+#ifdef __cplusplus
+}
+#endif
 
-#endif /* _RTE_X86_64_ATOMIC_H_ */
+#endif /* _RTE_ATOMIC_X86_64_H_ */
diff --git a/lib/librte_eal/common/include/rte_atomic.h b/lib/librte_eal/common/include/generic/rte_atomic.h
similarity index 74%
rename from lib/librte_eal/common/include/rte_atomic.h
rename to lib/librte_eal/common/include/generic/rte_atomic.h
index a5b6eec302..6c7581adac 100644
--- a/lib/librte_eal/common/include/rte_atomic.h
+++ b/lib/librte_eal/common/include/generic/rte_atomic.h
@@ -38,48 +38,41 @@
  * @file
  * Atomic Operations
  *
- * This file defines a generic API for atomic
- * operations. The implementation is architecture-specific.
- *
- * See lib/librte_eal/common/include/i686/arch/rte_atomic.h
- * See lib/librte_eal/common/include/x86_64/arch/rte_atomic.h
+ * This file defines a generic API for atomic operations.
  */
 
-#ifdef __cplusplus
-extern "C" {
-#endif
-
 #include <stdint.h>
 
-#if RTE_MAX_LCORE == 1
-#define MPLOCKED                        /**< No need to insert MP lock prefix. */
-#else
-#define MPLOCKED        "lock ; "       /**< Insert MP lock prefix. */
-#endif
+#ifdef __DOXYGEN__
 
 /**
  * General memory barrier.
  *
  * Guarantees that the LOAD and STORE operations generated before the
  * barrier occur before the LOAD and STORE operations generated after.
+ * This function is architecture dependent.
  */
-#define rte_mb() _mm_mfence()
+static inline void rte_mb(void);
 
 /**
  * Write memory barrier.
  *
  * Guarantees that the STORE operations generated before the barrier
  * occur before the STORE operations generated after.
+ * This function is architecture dependent.
  */
-#define rte_wmb() _mm_sfence()
+static inline void rte_wmb(void);
 
 /**
  * Read memory barrier.
  *
  * Guarantees that the LOAD operations generated before the barrier
  * occur before the LOAD operations generated after.
+ * This function is architecture dependent.
  */
-#define rte_rmb() _mm_lfence()
+static inline void rte_rmb(void);
+
+#endif /* __DOXYGEN__ */
 
 /**
  * Compiler barrier.
@@ -91,13 +84,6 @@ extern "C" {
 	asm volatile ("" : : : "memory"); \
 } while(0)
 
-#include <emmintrin.h>
-
-/**
- * @file
- * Atomic Operations on x86_64
- */
-
 /*------------------------- 16 bit atomic operations -------------------------*/
 
 /**
  * Atomic compare and set.
  *
  * (atomic) equivalent to:
  *   if (*dst == exp)
  *     *dst = src (all 16-bit words)
  *
  * @param dst
  *   The destination location into which the value will be written.
  * @param exp
  *   The expected value.
  * @param src
  *   The new value.
  * @return
  *   Non-zero on success; 0 on failure.
  */
 static inline int
+rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline int
 rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t res;
-
-	asm volatile(
-			MPLOCKED
-			"cmpxchgw %[src], %[dst];"
-			"sete %[res];"
-			: [res] "=a" (res),     /* output */
-			  [dst] "=m" (*dst)
-			: [src] "r" (src),      /* input */
-			  "a" (exp),
-			  "m" (*dst)
-			: "memory");            /* no-clobber list */
-	return res;
-#else
 	return __sync_bool_compare_and_swap(dst, exp, src);
-#endif
 }
+#endif
 
 /**
  * The atomic counter structure.
@@ -225,19 +200,15 @@
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic16_inc(rte_atomic16_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic16_inc(rte_atomic16_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	asm volatile(
-			MPLOCKED
-			"incw %[cnt]"
-			: [cnt] "=m" (v->cnt)   /* output */
-			: "m" (v->cnt)          /* input */
-			);
-#else
 	rte_atomic16_add(v, 1);
-#endif
 }
+#endif
 
 /**
  * Atomically decrement a counter by one.
@@ -246,19 +217,15 @@
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic16_dec(rte_atomic16_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic16_dec(rte_atomic16_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	asm volatile(
-			MPLOCKED
-			"decw %[cnt]"
-			: [cnt] "=m" (v->cnt)   /* output */
-			: "m" (v->cnt)          /* input */
-			);
-#else
 	rte_atomic16_sub(v, 1);
-#endif
 }
+#endif
 
 /**
  * Atomically add a 16-bit value to a counter and return the result.
@@ -310,23 +277,14 @@
  * @return
  *   True if the result after the increment operation is 0; false otherwise.
  */
+static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t ret;
-
-	asm volatile(
-			MPLOCKED
-			"incw %[cnt] ; "
-			"sete %[ret]"
-			: [cnt] "+m" (v->cnt),  /* output */
-			  [ret] "=qm" (ret)
-			);
-	return (ret != 0);
-#else
 	return (__sync_add_and_fetch(&v->cnt, 1) == 0);
-#endif
 }
+#endif
 
 /**
  * Atomically decrement a 16-bit counter by one and test.
@@ -339,22 +297,14 @@
  * @return
  *   True if the result after the decrement operation is 0; false otherwise.
  */
+static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t ret;
-
-	asm volatile(MPLOCKED
-			"decw %[cnt] ; "
-			"sete %[ret]"
-			: [cnt] "+m" (v->cnt),  /* output */
-			  [ret] "=qm" (ret)
-			);
-	return (ret != 0);
-#else
 	return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
-#endif
 }
+#endif
 
 /**
  * Atomically test and set a 16-bit atomic counter.
  *
  * If the counter value is already set, return 0 (failed). Otherwise, set
  * the counter value to 1 and return 1 (success).
  *
  * @param v
  *   A pointer to the atomic counter.
  * @return
  *   0 if failed; else 1, success.
  */
+static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
 {
 	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
 }
+#endif
 
 /**
  * Atomically set a 16-bit counter to 0.
@@ -402,26 +356,15 @@
  *   Non-zero on success; 0 on failure.
  */
 static inline int
+rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline int
 rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t res;
-
-	asm volatile(
-			MPLOCKED
-			"cmpxchgl %[src], %[dst];"
-			"sete %[res];"
-			: [res] "=a" (res),     /* output */
-			  [dst] "=m" (*dst)
-			: [src] "r" (src),      /* input */
-			  "a" (exp),
-			  "m" (*dst)
-			: "memory");            /* no-clobber list */
-	return res;
-#else
 	return __sync_bool_compare_and_swap(dst, exp, src);
-#endif
 }
+#endif
 
 /**
  * The atomic counter structure.
@@ -510,19 +453,15 @@
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic32_inc(rte_atomic32_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic32_inc(rte_atomic32_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	asm volatile(
-			MPLOCKED
-			"incl %[cnt]"
-			: [cnt] "=m" (v->cnt)   /* output */
-			: "m" (v->cnt)          /* input */
-			);
-#else
 	rte_atomic32_add(v, 1);
-#endif
 }
+#endif
 
 /**
  * Atomically decrement a counter by one.
@@ -531,19 +470,15 @@
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic32_dec(rte_atomic32_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic32_dec(rte_atomic32_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	asm volatile(
-			MPLOCKED
-			"decl %[cnt]"
-			: [cnt] "=m" (v->cnt)   /* output */
-			: "m" (v->cnt)          /* input */
-			);
-#else
 	rte_atomic32_sub(v,1);
-#endif
 }
+#endif
 
 /**
  * Atomically add a 32-bit value to a counter and return the result.
@@ -595,23 +530,14 @@
  * @return
  *   True if the result after the increment operation is 0; false otherwise.
  */
+static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t ret;
-
-	asm volatile(
-			MPLOCKED
-			"incl %[cnt] ; "
-			"sete %[ret]"
-			: [cnt] "+m" (v->cnt),  /* output */
-			  [ret] "=qm" (ret)
-			);
-	return (ret != 0);
-#else
 	return (__sync_add_and_fetch(&v->cnt, 1) == 0);
-#endif
 }
+#endif
 
 /**
  * Atomically decrement a 32-bit counter by one and test.
@@ -624,22 +550,14 @@
  * @return
  *   True if the result after the decrement operation is 0; false otherwise.
  */
+static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
 {
-#ifndef RTE_FORCE_INTRINSICS
-	uint8_t ret;
-
-	asm volatile(MPLOCKED
-			"decl %[cnt] ; "
-			"sete %[ret]"
-			: [cnt] "+m" (v->cnt),  /* output */
-			  [ret] "=qm" (ret)
-			);
-	return (ret != 0);
-#else
 	return (__sync_sub_and_fetch(&v->cnt, 1) == 0);
-#endif
 }
+#endif
 
 /**
  * Atomically test and set a 32-bit atomic counter.
  *
  * If the counter value is already set, return 0 (failed). Otherwise, set
  * the counter value to 1 and return 1 (success).
  *
  * @param v
  *   A pointer to the atomic counter.
  * @return
  *   0 if failed; else 1, success.
  */
+static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
 {
 	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
 }
+#endif
 
 /**
  * Atomically set a 32-bit counter to 0.
@@ -668,13 +590,6 @@
 static inline void rte_atomic32_clear(rte_atomic32_t *v)
 {
 	v->cnt = 0;
 }
 
-#ifndef RTE_FORCE_INTRINSICS
-/* any other functions are in arch specific files */
-#include "arch/rte_atomic.h"
-
-
-#ifdef __DOXYGEN__
-
 /*------------------------- 64 bit atomic operations -------------------------*/
 
 /**
  * An atomic compare and set function used by the mutex functions.
  * (atomic) equivalent to:
  *   if (*dst == exp)
  *     *dst = src (all 64-bit words)
  *
  * @param dst
  *   The destination into which the value will be written.
  * @param exp
  *   The expected value.
  * @param src
  *   The new value.
  * @return
  *   Non-zero on success; 0 on failure.
  */
 static inline int
 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
 
-/**
- * The atomic counter structure.
- */
-typedef struct {
-	volatile int64_t cnt;  /**< Internal counter value. */
-} rte_atomic64_t;
-
-/**
- * Static initializer for an atomic counter.
- */
-#define RTE_ATOMIC64_INIT(val) { (val) }
-
-/**
- * Initialize the atomic counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
-static inline void
-rte_atomic64_init(rte_atomic64_t *v);
-
-/**
- * Atomically read a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   The value of the counter.
- */
-static inline int64_t
-rte_atomic64_read(rte_atomic64_t *v);
-
-/**
- * Atomically set a 64-bit counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param new_value
- *   The new value of the counter.
- */
-static inline void
-rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
-
-/**
- * Atomically add a 64-bit value to a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- */
-static inline void
-rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
-
-/**
- * Atomically subtract a 64-bit value from a counter.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- */
-static inline void
-rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
-
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
-static inline void
-rte_atomic64_inc(rte_atomic64_t *v);
-
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
-static inline void
-rte_atomic64_dec(rte_atomic64_t *v);
-
-/**
- * Add a 64-bit value to an atomic counter and return the result.
- *
- * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
- * returns the value of v after the addition.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param inc
- *   The value to be added to the counter.
- * @return
- *   The value of v after the addition.
- */
-static inline int64_t
-rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
-
-/**
- * Subtract a 64-bit value from an atomic counter and return the result.
- *
- * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
- * and returns the value of v after the subtraction.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @param dec
- *   The value to be subtracted from the counter.
- * @return
- *   The value of v after the subtraction.
- */
-static inline int64_t
-rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
-
-/**
- * Atomically increment a 64-bit counter by one and test.
- *
- * Atomically increments the atomic counter (v) by one and returns
- * true if the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after the addition is 0; false otherwise.
- */
-static inline int
-rte_atomic64_inc_and_test(rte_atomic64_t *v);
-
-/**
- * Atomically decrement a 64-bit counter by one and test.
- *
- * Atomically decrements the atomic counter (v) by one and returns true if
- * the result is 0, or false in all other cases.
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   True if the result after subtraction is 0; false otherwise.
- */
-static inline int
-rte_atomic64_dec_and_test(rte_atomic64_t *v);
-
-/**
- * Atomically test and set a 64-bit atomic counter.
- *
- * If the counter value is already set, return 0 (failed). Otherwise, set
- * the counter value to 1 and return 1 (success).
- *
- * @param v
- *   A pointer to the atomic counter.
- * @return
- *   0 if failed; else 1, success.
- */
-static inline int
-rte_atomic64_test_and_set(rte_atomic64_t *v);
-
-/**
- * Atomically set a 64-bit counter to 0.
- *
- * @param v
- *   A pointer to the atomic counter.
- */
-static inline void
-rte_atomic64_clear(rte_atomic64_t *v);
-
-#endif /* __DOXYGEN__ */
-
-#else /*RTE_FORCE_INTRINSICS */
-
-/*------------------------- 64 bit atomic operations -------------------------*/
-
-/**
- * An atomic compare and set function used by the mutex functions.
- * (atomic) equivalent to:
- *   if (*dst == exp)
- *     *dst = src (all 64-bit words)
- *
- * @param dst
- *   The destination into which the value will be written.
- * @param exp
- *   The expected value.
- * @param src
- *   The new value.
- * @return
- *   Non-zero on success; 0 on failure.
- */
+#ifdef RTE_FORCE_INTRINSICS
 static inline int
 rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 {
 	return __sync_bool_compare_and_swap(dst, exp, src);
 }
+#endif
 
 /**
  * The atomic counter structure.
@@ -907,6 +637,10 @@
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic64_init(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_init(rte_atomic64_t *v)
 {
 #ifdef __LP64__
@@ -922,6 +656,7 @@ rte_atomic64_init(rte_atomic64_t *v)
 	}
 #endif
 }
+#endif
 
 /**
  * Atomically read a 64-bit counter.
@@ -932,6 +667,10 @@ rte_atomic64_init(rte_atomic64_t *v)
  *   The value of the counter.
  */
 static inline int64_t
+rte_atomic64_read(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline int64_t
 rte_atomic64_read(rte_atomic64_t *v)
 {
 #ifdef __LP64__
@@ -949,6 +688,7 @@ rte_atomic64_read(rte_atomic64_t *v)
 	return tmp;
 #endif
 }
+#endif
 
 /**
  * Atomically set a 64-bit counter.
@@ -959,6 +699,10 @@ rte_atomic64_read(rte_atomic64_t *v)
  *   The new value of the counter.
  */
 static inline void
+rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
 {
 #ifdef __LP64__
@@ -974,6 +718,7 @@ rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
 	}
 #endif
 }
+#endif
 
 /**
  * Atomically add a 64-bit value to a counter.
@@ -984,10 +729,15 @@ rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
  *   The value to be added to the counter.
  */
 static inline void
+rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
 {
 	__sync_fetch_and_add(&v->cnt, inc);
 }
+#endif
 
 /**
  * Atomically subtract a 64-bit value from a counter.
@@ -998,10 +748,15 @@ rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
  *   The value to be subtracted from the counter.
  */
 static inline void
+rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
 {
 	__sync_fetch_and_sub(&v->cnt, dec);
 }
+#endif
 
 /**
  * Atomically increment a 64-bit counter by one and test.
@@ -1010,10 +765,15 @@ rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic64_inc(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_inc(rte_atomic64_t *v)
 {
 	rte_atomic64_add(v, 1);
 }
+#endif
 
 /**
  * Atomically decrement a 64-bit counter by one and test.
@@ -1022,10 +782,15 @@ rte_atomic64_inc(rte_atomic64_t *v)
  *   A pointer to the atomic counter.
  */
 static inline void
+rte_atomic64_dec(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline void
 rte_atomic64_dec(rte_atomic64_t *v)
 {
 	rte_atomic64_sub(v, 1);
 }
+#endif
 
 /**
  * Add a 64-bit value to an atomic counter and return the result.
@@ -1041,10 +806,15 @@ rte_atomic64_dec(rte_atomic64_t *v)
  *   The value of v after the addition.
  */
 static inline int64_t
+rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline int64_t
 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
 {
 	return __sync_add_and_fetch(&v->cnt, inc);
 }
+#endif
 
 /**
  * Subtract a 64-bit value from an atomic counter and return the result.
@@ -1060,10 +830,15 @@ rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
  *   The value of v after the subtraction.
  */
 static inline int64_t
+rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline int64_t
 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
 {
 	return __sync_sub_and_fetch(&v->cnt, dec);
 }
+#endif
 
 /**
  * Atomically increment a 64-bit counter by one and test.
@@ -1076,10 +851,14 @@ rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
  * @return
  *   True if the result after the addition is 0; false otherwise.
  */
+static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
 {
 	return rte_atomic64_add_return(v, 1) == 0;
 }
+#endif
 
 /**
  * Atomically decrement a 64-bit counter by one and test.
@@ -1092,10 +871,14 @@ static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
  * @return
  *   True if the result after subtraction is 0; false otherwise.
  */
+static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
 {
 	return rte_atomic64_sub_return(v, 1) == 0;
 }
+#endif
 
 /**
  * Atomically test and set a 64-bit atomic counter.
@@ -1108,10 +891,14 @@ static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
  * @return
  *   0 if failed; else 1, success.
  */
+static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
 {
 	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
 }
+#endif
 
 /**
  * Atomically set a 64-bit counter to 0.
@@ -1119,15 +906,13 @@ static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
  * @param v
  *   A pointer to the atomic counter.
  */
+static inline void rte_atomic64_clear(rte_atomic64_t *v);
+
+#ifdef RTE_FORCE_INTRINSICS
 static inline void rte_atomic64_clear(rte_atomic64_t *v)
 {
 	rte_atomic64_set(v, 0);
 }
-
-#endif /*RTE_FORCE_INTRINSICS */
-
-#ifdef __cplusplus
-}
 #endif
 
 #endif /* _RTE_ATOMIC_H_ */
-- 
2.20.1
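
As a usage illustration (not part of the patch): the split moves code between headers but leaves the application-facing API unchanged, so callers keep including <rte_atomic.h>, which after this change resolves to the architecture-specific header and in turn pulls in generic/rte_atomic.h for the common declarations. The sketch below is a minimal reference counter built only on functions visible in the diff above; RTE_ATOMIC32_INIT is assumed to exist in an unshown section of the generic header, by analogy with the RTE_ATOMIC64_INIT shown.

/* Hypothetical usage sketch of the rte_atomic API this patch reorganizes. */
#include <rte_atomic.h>

static rte_atomic32_t refcnt = RTE_ATOMIC32_INIT(1);   /* one initial reference */
static rte_atomic32_t once = RTE_ATOMIC32_INIT(0);     /* one-shot init flag */

static void
obj_get(void)
{
	rte_atomic32_inc(&refcnt);      /* lock-prefixed incl or __sync intrinsic */
}

static int
obj_put(void)
{
	/* Returns 1 only for the caller that drops the last reference,
	 * so exactly one thread performs the teardown. */
	return rte_atomic32_dec_and_test(&refcnt);
}

static int
init_once(void)
{
	/* test_and_set is cmpset(&cnt, 0, 1): only the first caller wins,
	 * regardless of which backend is compiled in. */
	return rte_atomic32_test_and_set(&once);
}

Whether these calls compile down to the MPLOCKED inline asm or to the __sync_* builtins is selected by RTE_FORCE_INTRINSICS, exactly as laid out in the #ifdef structure of the headers above; the calling code is identical either way.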