1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2017 Cavium, Inc
3 * Copyright(c) 2019 Arm Limited
 */
6 #ifndef _RTE_PAUSE_ARM64_H_
7 #define _RTE_PAUSE_ARM64_H_
13 #include <rte_common.h>
15 #ifdef RTE_ARM_USE_WFE
16 #define RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
19 #include "generic/rte_pause.h"
/*
 * CPU spin-wait hint for busy-poll loops: "yield" lets another hardware
 * thread on the same core make progress, and the "memory" clobber forces
 * the compiler to reload shared state on every loop iteration.
 */
21 static inline void rte_pause(void)
23 asm volatile("yield" ::: "memory");
26 #ifdef RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
28 /* Send an event to quit WFE. */
/* "sevl" sets the core-local event register so the very next WFE falls
 * through immediately; used to prime a wait loop before the first WFE.
 * Deliberately a plain statement (no do-while(0)): call sites invoke it
 * without a trailing semicolon. */
29 #define __RTE_ARM_SEVL() { asm volatile("sevl" : : : "memory"); }
31 /* Put processor into low power WFE(Wait For Event) state. */
/* Wakes on SEV/SEVL, an interrupt, or the exclusive monitor being
 * cleared by another observer's store to a monitored address. */
32 #define __RTE_ARM_WFE() { asm volatile("wfe" : : : "memory"); }
/*
35 * Atomic exclusive load from addr, it returns the 16-bit content of
36 * *addr while making it 'monitored', when it is written by someone
37 * else, the 'monitored' state is cleared and an event is generated
38 * implicitly to exit WFE.
 *
 * NOTE(review): dst receives the loaded halfword; memorder picks the
 * instruction — ldxrh for __ATOMIC_RELAXED, ldaxrh (acquire) otherwise.
 */
40 #define __RTE_ARM_LOAD_EXC_16(src, dst, memorder) { \
41 if (memorder == __ATOMIC_RELAXED) { \
42 asm volatile("ldxrh %w[tmp], [%x[addr]]" \
47 asm volatile("ldaxrh %w[tmp], [%x[addr]]" \
/*
54 * Atomic exclusive load from addr, it returns the 32-bit content of
55 * *addr while making it 'monitored', when it is written by someone
56 * else, the 'monitored' state is cleared and an event is generated
57 * implicitly to exit WFE.
 *
 * NOTE(review): 32-bit variant — ldxr for __ATOMIC_RELAXED, ldaxr
 * (acquire) otherwise; %w register form selects the 32-bit view.
 */
59 #define __RTE_ARM_LOAD_EXC_32(src, dst, memorder) { \
60 if (memorder == __ATOMIC_RELAXED) { \
61 asm volatile("ldxr %w[tmp], [%x[addr]]" \
66 asm volatile("ldaxr %w[tmp], [%x[addr]]" \
/*
73 * Atomic exclusive load from addr, it returns the 64-bit content of
74 * *addr while making it 'monitored', when it is written by someone
75 * else, the 'monitored' state is cleared and an event is generated
76 * implicitly to exit WFE.
 *
 * NOTE(review): 64-bit variant — ldxr for __ATOMIC_RELAXED, ldaxr
 * (acquire) otherwise; %x register form selects the 64-bit view.
 */
78 #define __RTE_ARM_LOAD_EXC_64(src, dst, memorder) { \
79 if (memorder == __ATOMIC_RELAXED) { \
80 asm volatile("ldxr %x[tmp], [%x[addr]]" \
85 asm volatile("ldaxr %x[tmp], [%x[addr]]" \
/*
92 * Atomic exclusive load from addr, it returns the 128-bit content of
93 * *addr while making it 'monitored', when it is written by someone
94 * else, the 'monitored' state is cleared and an event is generated
95 * implicitly to exit WFE.
 *
 * NOTE(review): the 128-bit value is fetched as a register pair
 * (ldxp / ldaxp) into dst reinterpreted as rte_int128_t; relaxed vs
 * acquire is again selected by memorder.
 */
97 #define __RTE_ARM_LOAD_EXC_128(src, dst, memorder) { \
98 volatile rte_int128_t *dst_128 = (volatile rte_int128_t *)&dst; \
99 if (memorder == __ATOMIC_RELAXED) { \
100 asm volatile("ldxp %x[tmp0], %x[tmp1], [%x[addr]]" \
101 : [tmp0] "=&r" (dst_128->val[0]), \
102 [tmp1] "=&r" (dst_128->val[1]) \
106 asm volatile("ldaxp %x[tmp0], %x[tmp1], [%x[addr]]" \
107 : [tmp0] "=&r" (dst_128->val[0]), \
108 [tmp1] "=&r" (dst_128->val[1]) \
/* Compile-time size dispatch to the matching exclusive-load macro above;
 * RTE_BUILD_BUG_ON rejects any operand width other than 16/32/64/128
 * bits, so an unsupported size fails the build rather than runtime. */
113 #define __RTE_ARM_LOAD_EXC(src, dst, memorder, size) { \
114 RTE_BUILD_BUG_ON(size != 16 && size != 32 && \
115 size != 64 && size != 128); \
117 __RTE_ARM_LOAD_EXC_16(src, dst, memorder) \
118 else if (size == 32) \
119 __RTE_ARM_LOAD_EXC_32(src, dst, memorder) \
120 else if (size == 64) \
121 __RTE_ARM_LOAD_EXC_64(src, dst, memorder) \
122 else if (size == 128) \
123 __RTE_ARM_LOAD_EXC_128(src, dst, memorder) \
/*
 * Wait until the 16-bit word at addr equals expected, using the
 * exclusive monitor + WFE so the core can sleep between checks instead
 * of hammering the cache line. memorder must be a compile-time
 * __ATOMIC_ACQUIRE or __ATOMIC_RELAXED (enforced below).
 */
126 static __rte_always_inline void
127 rte_wait_until_equal_16(volatile uint16_t *addr, uint16_t expected,
132 RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
133 memorder != __ATOMIC_RELAXED);
/* First exclusive load arms the monitor on *addr. */
135 __RTE_ARM_LOAD_EXC_16(addr, value, memorder)
136 if (value != expected) {
/* Re-load after each wake-up until the expected value is observed. */
140 __RTE_ARM_LOAD_EXC_16(addr, value, memorder)
141 } while (value != expected);
/*
 * 32-bit counterpart of rte_wait_until_equal_16: monitor-armed
 * exclusive loads plus WFE until *addr == expected. memorder must be a
 * compile-time __ATOMIC_ACQUIRE or __ATOMIC_RELAXED (enforced below).
 */
145 static __rte_always_inline void
146 rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected,
151 RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
152 memorder != __ATOMIC_RELAXED);
/* First exclusive load arms the monitor on *addr. */
154 __RTE_ARM_LOAD_EXC_32(addr, value, memorder)
155 if (value != expected) {
/* Re-load after each wake-up until the expected value is observed. */
159 __RTE_ARM_LOAD_EXC_32(addr, value, memorder)
160 } while (value != expected);
/*
 * 64-bit counterpart of rte_wait_until_equal_16: monitor-armed
 * exclusive loads plus WFE until *addr == expected. memorder must be a
 * compile-time __ATOMIC_ACQUIRE or __ATOMIC_RELAXED (enforced below).
 */
164 static __rte_always_inline void
165 rte_wait_until_equal_64(volatile uint64_t *addr, uint64_t expected,
170 RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE &&
171 memorder != __ATOMIC_RELAXED);
/* First exclusive load arms the monitor on *addr. */
173 __RTE_ARM_LOAD_EXC_64(addr, value, memorder)
174 if (value != expected) {
/* Re-load after each wake-up until the expected value is observed. */
178 __RTE_ARM_LOAD_EXC_64(addr, value, memorder)
179 } while (value != expected);
/* Wait until ((*addr & mask) cond expected) holds, where cond is a
 * comparison operator token (e.g. ==, !=) pasted into the expression.
 * Width-generic: the operand size (in bits) is derived from
 * sizeof(*addr) and dispatched via __RTE_ARM_LOAD_EXC. memorder must be
 * a compile-time constant __ATOMIC_ACQUIRE or __ATOMIC_RELAXED, both
 * enforced at build time below. */
183 #define RTE_WAIT_UNTIL_MASKED(addr, mask, cond, expected, memorder) do { \
184 RTE_BUILD_BUG_ON(!__builtin_constant_p(memorder)); \
185 RTE_BUILD_BUG_ON(memorder != __ATOMIC_ACQUIRE && \
186 memorder != __ATOMIC_RELAXED); \
187 const uint32_t size = sizeof(*(addr)) << 3; \
188 typeof(*(addr)) expected_value = (expected); \
189 typeof(*(addr)) value; \
190 __RTE_ARM_LOAD_EXC((addr), value, memorder, size) \
191 if (!((value & (mask)) cond expected_value)) { \
195 __RTE_ARM_LOAD_EXC((addr), value, memorder, size) \
196 } while (!((value & (mask)) cond expected_value)); \
200 #endif /* RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED */
206 #endif /* _RTE_PAUSE_ARM64_H_ */