From: Thomas Monjalon
Date: Tue, 19 Mar 2019 21:16:00 +0000 (+0100)
Subject: eal: remove redundant atomic API description
X-Git-Url: http://git.droids-corp.org/?a=commitdiff_plain;h=3a1a885e036c78521f74fdf7777f24e9dc3466df;p=dpdk.git

eal: remove redundant atomic API description

Atomic functions are described in the doxygen of the file
lib/librte_eal/common/include/generic/rte_atomic.h
The copies in arch-specific files are redundant and confuse readers
about the genericity of the API.

Signed-off-by: Thomas Monjalon
Acked-by: Shahaf Shuler
---

diff --git a/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h b/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
index 859562e593..7dc0d06d14 100644
--- a/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
+++ b/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
@@ -15,28 +15,10 @@ extern "C" {
 
 #include "generic/rte_atomic.h"
 
-/**
- * General memory barrier.
- *
- * Guarantees that the LOAD and STORE operations generated before the
- * barrier occur before the LOAD and STORE operations generated after.
- */
 #define rte_mb() __sync_synchronize()
 
-/**
- * Write memory barrier.
- *
- * Guarantees that the STORE operations generated before the barrier
- * occur before the STORE operations generated after.
- */
 #define rte_wmb() do { asm volatile ("dmb st" : : : "memory"); } while (0)
 
-/**
- * Read memory barrier.
- *
- * Guarantees that the LOAD operations generated before the barrier
- * occur before the LOAD operations generated after.
- */
 #define rte_rmb() __sync_synchronize()
 
 #define rte_smp_mb() rte_mb()

diff --git a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
index 797381c0f5..d6d4014c2a 100644
--- a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
@@ -49,28 +49,10 @@ extern "C" {
 #include <stdint.h>
 #include "generic/rte_atomic.h"
 
-/**
- * General memory barrier.
- *
- * Guarantees that the LOAD and STORE operations generated before the
- * barrier occur before the LOAD and STORE operations generated after.
- */
 #define rte_mb() asm volatile("sync" : : : "memory")
 
-/**
- * Write memory barrier.
- *
- * Guarantees that the STORE operations generated before the barrier
- * occur before the STORE operations generated after.
- */
 #define rte_wmb() asm volatile("sync" : : : "memory")
 
-/**
- * Read memory barrier.
- *
- * Guarantees that the LOAD operations generated before the barrier
- * occur before the LOAD operations generated after.
- */
 #define rte_rmb() asm volatile("sync" : : : "memory")
 
 #define rte_smp_mb() rte_mb()

diff --git a/lib/librte_eal/common/include/generic/rte_atomic.h b/lib/librte_eal/common/include/generic/rte_atomic.h
index 4afd1acc31..e917427022 100644
--- a/lib/librte_eal/common/include/generic/rte_atomic.h
+++ b/lib/librte_eal/common/include/generic/rte_atomic.h
@@ -25,7 +25,6 @@
  *
  * Guarantees that the LOAD and STORE operations generated before the
  * barrier occur before the LOAD and STORE operations generated after.
- * This function is architecture dependent.
  */
 static inline void rte_mb(void);
 
@@ -34,7 +33,6 @@ static inline void rte_mb(void);
  * Write memory barrier.
  *
  * Guarantees that the STORE operations generated before the barrier
  * occur before the STORE operations generated after.
- * This function is architecture dependent.
  */
 static inline void rte_wmb(void);
 
@@ -43,7 +41,6 @@ static inline void rte_wmb(void);
  *
  * Guarantees that the LOAD operations generated before the barrier
  * occur before the LOAD operations generated after.
- * This function is architecture dependent.
  */
 static inline void rte_rmb(void);
 
 ///@}
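
For illustration of what the generic descriptions guarantee in practice,
here is a minimal sketch of a single-producer/single-consumer mailbox
built on only the documented generic API. It assumes nothing beyond the
public rte_atomic.h barriers; the names mailbox_publish, mailbox_poll,
payload and ready are invented for this example.

    #include <stdint.h>
    #include <rte_atomic.h>

    /* Hypothetical mailbox shared by one producer and one consumer. */
    static uint64_t payload;
    static volatile uint32_t ready;

    /* Producer: rte_wmb() keeps the payload STORE ahead of the flag
     * STORE, which is exactly the guarantee documented for it. */
    static void
    mailbox_publish(uint64_t value)
    {
            payload = value;
            rte_wmb();
            ready = 1;
    }

    /* Consumer: rte_rmb() keeps the flag LOAD ahead of the payload
     * LOAD, which is exactly the guarantee documented for it. */
    static int
    mailbox_poll(uint64_t *out)
    {
            if (!ready)
                    return 0;
            rte_rmb();
            *out = payload;
            return 1;
    }

For ordering between lcores alone, the weaker rte_smp_wmb()/rte_smp_rmb()
variants would also suffice; the full barriers are shown here because they
are the ones whose descriptions this patch deduplicates.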