[dpdk-dev] [PATCH] eal: remove redundant API description
Thomas Monjalon
thomas at monjalon.net
Tue Mar 19 22:16:00 CET 2019
Atomic functions are described in the doxygen comments of the file
lib/librte_eal/common/include/generic/rte_atomic.h.
The copies in arch-specific files are redundant
and mislead readers about the generic nature of the API.
Signed-off-by: Thomas Monjalon <thomas at monjalon.net>
---
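Note: the guarantees that remain documented in the generic header are what
callers program against. As a reminder, here is a minimal sketch of a
single-producer / single-consumer handoff relying on those guarantees (the
variables and functions below are illustrative only, not part of the patch):

#include <rte_atomic.h>

static int data;
static volatile int ready;

/* Producer: publish the payload, then raise the flag. */
static void
produce(int value)
{
	data = value;
	rte_wmb();	/* stores before the barrier complete before stores after */
	ready = 1;
}

/* Consumer: wait for the flag, then read the payload safely. */
static int
consume(void)
{
	while (!ready)
		;
	rte_rmb();	/* loads before the barrier complete before loads after */
	return data;
}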
.../common/include/arch/arm/rte_atomic_32.h | 18 ------------------
.../common/include/arch/ppc_64/rte_atomic.h | 18 ------------------
.../common/include/generic/rte_atomic.h | 3 ---
3 files changed, 39 deletions(-)
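Similarly, the full barrier keeps a single generic description. A hedged
sketch of a case that needs rte_mb() because a store must become visible
before a subsequent load (the Dekker-style flags here are illustrative):

#include <rte_atomic.h>

static volatile int flag[2];

/* Try to enter a critical section; returns nonzero on success. */
static int
try_enter(unsigned int self)
{
	flag[self] = 1;
	rte_mb();	/* store to flag[self] visible before loading the peer's flag */
	return flag[1 - self] == 0;
}

Neither rte_wmb() nor rte_rmb() would be enough here, since the required
ordering is store -> load, which only the general barrier provides.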
diff --git a/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h b/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
index 859562e59..7dc0d06d1 100644
--- a/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
+++ b/lib/librte_eal/common/include/arch/arm/rte_atomic_32.h
@@ -15,28 +15,10 @@ extern "C" {
#include "generic/rte_atomic.h"
-/**
- * General memory barrier.
- *
- * Guarantees that the LOAD and STORE operations generated before the
- * barrier occur before the LOAD and STORE operations generated after.
- */
#define rte_mb() __sync_synchronize()
-/**
- * Write memory barrier.
- *
- * Guarantees that the STORE operations generated before the barrier
- * occur before the STORE operations generated after.
- */
#define rte_wmb() do { asm volatile ("dmb st" : : : "memory"); } while (0)
-/**
- * Read memory barrier.
- *
- * Guarantees that the LOAD operations generated before the barrier
- * occur before the LOAD operations generated after.
- */
#define rte_rmb() __sync_synchronize()
#define rte_smp_mb() rte_mb()
diff --git a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
index ce38350bd..2dd59fd78 100644
--- a/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
+++ b/lib/librte_eal/common/include/arch/ppc_64/rte_atomic.h
@@ -49,32 +49,14 @@ extern "C" {
#include <stdint.h>
#include "generic/rte_atomic.h"
-/**
- * General memory barrier.
- *
- * Guarantees that the LOAD and STORE operations generated before the
- * barrier occur before the LOAD and STORE operations generated after.
- */
#define rte_mb() asm volatile("sync" : : : "memory")
-/**
- * Write memory barrier.
- *
- * Guarantees that the STORE operations generated before the barrier
- * occur before the STORE operations generated after.
- */
#ifdef RTE_ARCH_64
#define rte_wmb() asm volatile("lwsync" : : : "memory")
#else
#define rte_wmb() asm volatile("sync" : : : "memory")
#endif
-/**
- * Read memory barrier.
- *
- * Guarantees that the LOAD operations generated before the barrier
- * occur before the LOAD operations generated after.
- */
#ifdef RTE_ARCH_64
#define rte_rmb() asm volatile("lwsync" : : : "memory")
#else
diff --git a/lib/librte_eal/common/include/generic/rte_atomic.h b/lib/librte_eal/common/include/generic/rte_atomic.h
index 4afd1acc3..e91742702 100644
--- a/lib/librte_eal/common/include/generic/rte_atomic.h
+++ b/lib/librte_eal/common/include/generic/rte_atomic.h
@@ -25,7 +25,6 @@
*
* Guarantees that the LOAD and STORE operations generated before the
* barrier occur before the LOAD and STORE operations generated after.
- * This function is architecture dependent.
*/
static inline void rte_mb(void);
@@ -34,7 +33,6 @@ static inline void rte_mb(void);
*
* Guarantees that the STORE operations generated before the barrier
* occur before the STORE operations generated after.
- * This function is architecture dependent.
*/
static inline void rte_wmb(void);
@@ -43,7 +41,6 @@ static inline void rte_wmb(void);
*
* Guarantees that the LOAD operations generated before the barrier
* occur before the LOAD operations generated after.
- * This function is architecture dependent.
*/
static inline void rte_rmb(void);
///@}
--
2.20.1