From ff2863570fccaa9f31db3c91d0f40a5532a198c7 Mon Sep 17 00:00:00 2001
From: Stephen Hemminger
Date: Thu, 25 Jan 2018 18:01:37 -0800
Subject: [PATCH] eal: introduce atomic exchange operation

To handle atomic update of link status (64 bit), every driver was
doing its own version using cmpset. Atomic exchange is a useful
primitive in its own right; therefore make it an EAL routine.

Signed-off-by: Stephen Hemminger
Reviewed-by: Ferruh Yigit
Acked-by: Konstantin Ananyev
---
 .../common/include/arch/ppc_64/rte_atomic.h   | 21 ++++-
 .../common/include/arch/x86/rte_atomic.h      | 24 ++++++
 .../common/include/arch/x86/rte_atomic_32.h   | 12 +++
 .../common/include/arch/x86/rte_atomic_64.h   | 12 +++
 .../common/include/generic/rte_atomic.h       | 78 +++++++++++++++++++
 5 files changed, 146 insertions(+), 1 deletion(-)

diff --git a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
index 182177403b..ce38350bdc 100644
--- a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
@@ -136,6 +136,12 @@ static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
 	return __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE) == 0;
 }
 
+static inline uint16_t
+rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
+{
+	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
+}
+
 /*------------------------- 32 bit atomic operations -------------------------*/
 
 static inline int
@@ -237,6 +243,13 @@ static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
 	return ret == 0;
 }
 
+
+static inline uint32_t
+rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
+{
+	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
+}
+
 /*------------------------- 64 bit atomic operations -------------------------*/
 
 static inline int
@@ -431,7 +444,6 @@ static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
 {
 	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
 }
-
 /**
  * Atomically set a 64-bit counter to 0.
 *
@@ -442,6 +454,13 @@ static inline void rte_atomic64_clear(rte_atomic64_t *v)
 {
 	v->cnt = 0;
 }
+
+static inline uint64_t
+rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
+{
+	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
+}
+
 #endif
 
 #ifdef __cplusplus
diff --git a/lib/librte_eal/common/include/arch/x86/rte_atomic.h b/lib/librte_eal/common/include/arch/x86/rte_atomic.h
index 5cfd3832c2..148398f50a 100644
--- a/lib/librte_eal/common/include/arch/x86/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/x86/rte_atomic.h
@@ -104,6 +104,18 @@ rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
 	return res;
 }
 
+static inline uint16_t
+rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
+{
+	asm volatile(
+			MPLOCKED
+			"xchgw %0, %1;"
+			: "=r" (val), "=m" (*dst)
+			: "0" (val),  "m" (*dst)
+			: "memory");	/* "memory" clobber = compiler barrier */
+	return val;
+}
+
 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
 {
 	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
@@ -178,6 +190,18 @@ rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
 	return res;
 }
 
+static inline uint32_t
+rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
+{
+	asm volatile(
+			MPLOCKED
+			"xchgl %0, %1;"
+			: "=r" (val), "=m" (*dst)
+			: "0" (val),  "m" (*dst)
+			: "memory");	/* "memory" clobber = compiler barrier */
+	return val;
+}
+
 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
 {
 	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
diff --git a/lib/librte_eal/common/include/arch/x86/rte_atomic_32.h b/lib/librte_eal/common/include/arch/x86/rte_atomic_32.h
index fb3abf1879..8d711b6f68 100644
--- a/lib/librte_eal/common/include/arch/x86/rte_atomic_32.h
+++ b/lib/librte_eal/common/include/arch/x86/rte_atomic_32.h
@@ -98,6 +98,18 @@ rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 	return res;
 }
 
+static inline uint64_t
+rte_atomic64_exchange(volatile uint64_t *dest, uint64_t val)
+{
+	uint64_t old;
+
+	do {
+		old = *dest;
+	} while (rte_atomic64_cmpset(dest, old, val) == 0);
+
+	return old;
+}
+
 static inline void
 rte_atomic64_init(rte_atomic64_t *v)
 {
diff --git a/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h b/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
index 1a53a766bd..fd2ec9c537 100644
--- a/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
+++ b/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
@@ -71,6 +71,18 @@ rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 	return res;
 }
 
+static inline uint64_t
+rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
+{
+	asm volatile(
+			MPLOCKED
+			"xchgq %0, %1;"
+			: "=r" (val), "=m" (*dst)
+			: "0" (val),  "m" (*dst)
+			: "memory");	/* "memory" clobber = compiler barrier */
+	return val;
+}
+
 static inline void
 rte_atomic64_init(rte_atomic64_t *v)
 {
diff --git a/lib/librte_eal/common/include/generic/rte_atomic.h b/lib/librte_eal/common/include/generic/rte_atomic.h
index 50e1b8a4d2..8652c02649 100644
--- a/lib/librte_eal/common/include/generic/rte_atomic.h
+++ b/lib/librte_eal/common/include/generic/rte_atomic.h
@@ -190,6 +190,32 @@ rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
 }
 #endif
 
+/**
+ * Atomic exchange.
+ *
+ * (atomic) equivalent to:
+ *   ret = *dst;
+ *   *dst = val;
+ *   return ret;
+ *
+ * @param dst
+ *   The destination location into which the value will be written.
+ * @param val
+ *   The new value.
+ * @return
+ *   The original value at that location.
+ */
+static inline uint16_t
+rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline uint16_t
+rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
+{
+	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
+}
+#endif
+
 /**
  * The atomic counter structure.
  */
@@ -443,6 +469,32 @@ rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
 }
 #endif
 
+/**
+ * Atomic exchange.
+ *
+ * (atomic) equivalent to:
+ *   ret = *dst;
+ *   *dst = val;
+ *   return ret;
+ *
+ * @param dst
+ *   The destination location into which the value will be written.
+ * @param val
+ *   The new value.
+ * @return
+ *   The original value at that location.
+ */
+static inline uint32_t
+rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline uint32_t
+rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
+{
+	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
+}
+#endif
+
 /**
  * The atomic counter structure.
  */
@@ -695,6 +747,32 @@ rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
 }
 #endif
 
+/**
+ * Atomic exchange.
+ *
+ * (atomic) equivalent to:
+ *   ret = *dst;
+ *   *dst = val;
+ *   return ret;
+ *
+ * @param dst
+ *   The destination location into which the value will be written.
+ * @param val
+ *   The new value.
+ * @return
+ *   The original value at that location.
+ */
+static inline uint64_t
+rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);
+
+#ifdef RTE_FORCE_INTRINSICS
+static inline uint64_t
+rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
+{
+	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
+}
+#endif
+
 /**
  * The atomic counter structure.
  */
-- 
2.20.1
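
A usage note, not part of the patch above: the commit message cites the
64-bit link status update that every driver open-coded as a cmpset loop.
The sketch below shows how a driver could publish link status with the
new primitive. "struct hypothetical_dev", its "link" field, and the
helper name are illustrative assumptions, not DPDK API.

#include <stdint.h>
#include <rte_atomic.h>

/* Hypothetical device: link speed/duplex/state packed into one 64-bit
 * word, as drivers do with struct rte_eth_link. */
struct hypothetical_dev {
	volatile uint64_t link;
};

/* Publish a new link status; returns non-zero if it changed.
 * rte_atomic64_exchange() stores the new value and returns the previous
 * one in a single atomic step, so unlike a rte_atomic64_cmpset() loop
 * it cannot fail and needs no retry. */
static int
hypothetical_link_update(struct hypothetical_dev *dev, uint64_t new_link)
{
	uint64_t old_link = rte_atomic64_exchange(&dev->link, new_link);

	return old_link != new_link;
}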