[dpdk-dev] [RFC v2 1/5] eal: add the APIs to wait until equal

Gavin Hu gavin.hu at arm.com
Wed Jul 3 10:58:32 CEST 2019


The rte_wait_until_equal16/32/64 APIs abstract the functionality of 'polling
for a memory location to become equal to a given value'.

Signed-off-by: Gavin Hu <gavin.hu at arm.com>
Reviewed-by: Ruifeng Wang <ruifeng.wang at arm.com>
Reviewed-by: Steve Capper <steve.capper at arm.com>
Reviewed-by: Ola Liljedahl <ola.liljedahl at arm.com>
Reviewed-by: Honnappa Nagarahalli <honnappa.nagarahalli at arm.com>
---
 .../common/include/arch/arm/rte_atomic_64.h        |   4 +
 .../common/include/arch/arm/rte_pause_64.h         | 106 +++++++++++++++++++++
 lib/librte_eal/common/include/generic/rte_pause.h  |  39 +++++++-
 3 files changed, 148 insertions(+), 1 deletion(-)

diff --git a/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h b/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
index 97060e4..8d742c6 100644
--- a/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
+++ b/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
@@ -15,8 +15,12 @@ extern "C" {
 
 #include "generic/rte_atomic.h"
 
+#ifndef dsb
 #define dsb(opt) asm volatile("dsb " #opt : : : "memory")
+#endif
+#ifndef dmb
 #define dmb(opt) asm volatile("dmb " #opt : : : "memory")
+#endif
 
 #define rte_mb() dsb(sy)
 
diff --git a/lib/librte_eal/common/include/arch/arm/rte_pause_64.h b/lib/librte_eal/common/include/arch/arm/rte_pause_64.h
index 93895d3..1f7be0a 100644
--- a/lib/librte_eal/common/include/arch/arm/rte_pause_64.h
+++ b/lib/librte_eal/common/include/arch/arm/rte_pause_64.h
@@ -17,6 +17,112 @@ static inline void rte_pause(void)
 	asm volatile("yield" ::: "memory");
 }
 
+#ifdef RTE_USE_WFE
+/* Wait for *addr to be updated with the expected value, using WFE to sleep */
+static __rte_always_inline void
+rte_wait_until_equal16(volatile uint16_t *addr, uint16_t expected, int memorder)
+{
+	uint16_t tmp;
+	if (memorder == __ATOMIC_RELAXED)
+		asm volatile(
+			"ldxrh	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldxrh	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+	else
+		asm volatile(
+			"ldaxrh	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldaxrh	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+}
+
+static __rte_always_inline void
+rte_wait_until_equal32(volatile uint32_t *addr, uint32_t expected, int memorder)
+{
+	uint32_t tmp;
+	if (memorder == __ATOMIC_RELAXED)
+		asm volatile(
+			"ldxr	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldxr	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+	else
+		asm volatile(
+			"ldaxr	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldaxr	%w[tmp], %[addr]\n"
+			"cmp	%w[tmp], %w[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+}
+
+static __rte_always_inline void
+rte_wait_until_equal64(volatile uint64_t *addr, uint64_t expected, int memorder)
+{
+	uint64_t tmp;
+	if (memorder == __ATOMIC_RELAXED)
+		asm volatile(
+			"ldxr	%x[tmp], %[addr]\n"
+			"cmp	%x[tmp], %x[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldxr	%x[tmp], %[addr]\n"
+			"cmp	%x[tmp], %x[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+	else
+		asm volatile(
+			"ldaxr	%x[tmp], %[addr]\n"
+			"cmp	%x[tmp], %x[expected]\n"
+			"b.eq	2f\n"
+			"sevl\n"
+			"1:	wfe\n"
+			"ldaxr	%x[tmp], %[addr]\n"
+			"cmp	%x[tmp], %x[expected]\n"
+			"b.ne	1b\n"
+			"2:\n"
+			: [tmp] "=&r" (tmp)
+			: [addr] "Q"(*addr), [expected] "r"(expected)
+			: "cc", "memory");
+}
+
+#endif
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/lib/librte_eal/common/include/generic/rte_pause.h b/lib/librte_eal/common/include/generic/rte_pause.h
index 52bd4db..8f5f025 100644
--- a/lib/librte_eal/common/include/generic/rte_pause.h
+++ b/lib/librte_eal/common/include/generic/rte_pause.h
@@ -4,7 +4,6 @@
 
 #ifndef _RTE_PAUSE_H_
 #define _RTE_PAUSE_H_
-
 /**
  * @file
  *
@@ -12,6 +11,10 @@
  *
  */
 
+#include <stdint.h>
+#include <rte_common.h>
+#include <rte_atomic.h>
+
 /**
  * Pause CPU execution for a short while
  *
@@ -20,4 +23,38 @@
  */
 static inline void rte_pause(void);
 
+#if !defined(RTE_USE_WFE)
+#ifdef RTE_USE_C11_MEM_MODEL
+#define __rte_wait_until_equal(addr, expected, memorder) do {\
+	while (__atomic_load_n((addr), (memorder)) != (expected)) \
+		rte_pause();\
+} while (0)
+#else
+#define __rte_wait_until_equal(addr, expected, memorder) do {\
+	while (*(addr) != (expected))\
+		rte_pause();\
+	if ((memorder) != __ATOMIC_RELAXED)\
+		rte_smp_rmb();\
+} while (0)
+#endif
+
+static __rte_always_inline void
+rte_wait_until_equal16(volatile uint16_t *addr, uint16_t expected, int memorder)
+{
+	__rte_wait_until_equal(addr, expected, memorder);
+}
+
+static __rte_always_inline void
+rte_wait_until_equal32(volatile uint32_t *addr, uint32_t expected, int memorder)
+{
+	__rte_wait_until_equal(addr, expected, memorder);
+}
+
+static __rte_always_inline void
+rte_wait_until_equal64(volatile uint64_t *addr, uint64_t expected, int memorder)
+{
+	__rte_wait_until_equal(addr, expected, memorder);
+}
+#endif /* RTE_USE_WFE */
+
 #endif /* _RTE_PAUSE_H_ */
-- 
2.7.4



More information about the dev mailing list