1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015 Cavium, Inc
3 * Copyright(c) 2019 Arm Limited
 */
6 #ifndef _RTE_ATOMIC_ARM64_H_
7 #define _RTE_ATOMIC_ARM64_H_
9 #ifndef RTE_FORCE_INTRINSICS
10 # error Platform must be built with CONFIG_RTE_FORCE_INTRINSICS
17 #include "generic/rte_atomic.h"
18 #include <rte_branch_prediction.h>
19 #include <rte_compat.h>
20 #include <rte_debug.h>
/* General (full-system) barriers: "dsb" stalls until all prior memory
 * accesses complete; "sy" covers the full system domain, "st" orders
 * stores only, "ld" orders loads only.
 */
22 #define rte_mb() asm volatile("dsb sy" : : : "memory")
24 #define rte_wmb() asm volatile("dsb st" : : : "memory")
26 #define rte_rmb() asm volatile("dsb ld" : : : "memory")
/* SMP barriers: lighter "dmb" restricted to the inner-shareable ("ish")
 * domain, which is sufficient for CPU-to-CPU ordering.
 */
28 #define rte_smp_mb() asm volatile("dmb ish" : : : "memory")
30 #define rte_smp_wmb() asm volatile("dmb ishst" : : : "memory")
32 #define rte_smp_rmb() asm volatile("dmb ishld" : : : "memory")
/* IO barriers simply alias the strongest (full-system dsb) variants. */
34 #define rte_io_mb() rte_mb()
36 #define rte_io_wmb() rte_wmb()
38 #define rte_io_rmb() rte_rmb()
/* Coherent-IO barriers: "dmb" over the outer-shareable ("osh") domain. */
40 #define rte_cio_wmb() asm volatile("dmb oshst" : : : "memory")
42 #define rte_cio_rmb() asm volatile("dmb oshld" : : : "memory")
44 /*------------------------ 128 bit atomic operations -------------------------*/
46 #if defined(__ARM_FEATURE_ATOMICS) || defined(RTE_ARM_FEATURE_ATOMICS)
/* Generate a 128-bit compare-and-swap helper built on an LSE "caspX"
 * instruction pair.
 *   cas_op_name - name of the generated function
 *   op_string   - casp / caspa / caspl / caspal, selecting relaxed /
 *                 acquire / release / acq_rel ordering respectively
 * The generated function returns the value previously held in *dst
 * (in x0/x1 after the casp executes).
 * NOTE(review): part of the asm statement (output/input constraint
 * lists and the function epilogue) appears to be missing from this
 * view of the file; the remaining lines are preserved unchanged.
 */
47 #define __ATOMIC128_CAS_OP(cas_op_name, op_string) \
48 static __rte_noinline rte_int128_t \
49 cas_op_name(rte_int128_t *dst, rte_int128_t old, rte_int128_t updated) \
51 /* caspX instructions register pair must start from even-numbered
52 * register at operand 1.
53 * So, specify registers for local variables here.
55 register uint64_t x0 __asm("x0") = (uint64_t)old.val[0]; \
56 register uint64_t x1 __asm("x1") = (uint64_t)old.val[1]; \
57 register uint64_t x2 __asm("x2") = (uint64_t)updated.val[0]; \
58 register uint64_t x3 __asm("x3") = (uint64_t)updated.val[1]; \
60 op_string " %[old0], %[old1], %[upd0], %[upd1], [%[dst]]" \
72 __ATOMIC128_CAS_OP(__cas_128_relaxed, "casp")
73 __ATOMIC128_CAS_OP(__cas_128_acquire, "caspa")
74 __ATOMIC128_CAS_OP(__cas_128_release, "caspl")
75 __ATOMIC128_CAS_OP(__cas_128_acq_rel, "caspal")
/* The generator macro is needed only for the four helpers above. */
77 #undef __ATOMIC128_CAS_OP
/**
 * 128-bit atomic compare-and-exchange for arm64.
 *
 * Compares *dst with *exp; if they match, *src is stored into *dst.
 * On return *exp is unconditionally overwritten with the value that
 * was observed in *dst (see the comment near the end of the function).
 *
 * @param dst
 *   Location operated on atomically.
 * @param exp
 *   Expected value; receives the observed value of *dst on return.
 * @param src
 *   Value to store when the comparison succeeds.
 * @param weak
 *   Ignored -- only the strong (non-spuriously-failing) variant is
 *   implemented here.
 * @param success
 *   Memory ordering on success; must be one of the orders listed in
 *   the RTE_ASSERT below. The failure ordering is ignored (asserted to
 *   be no stronger than 'success' by contract -- see comment below).
 * @return
 *   Non-zero if the exchange took place, 0 otherwise.
 *
 * NOTE(review): several interior lines (the function's return type and
 * opening brace, the 'failure' parameter, parts of the inline asm in
 * __LOAD_128/__STORE_128, and some #else/#endif lines) appear to be
 * missing from this view of the file; all remaining lines are
 * preserved unchanged.
 */
83 rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp,
84 const rte_int128_t *src, unsigned int weak, int success,
87 /* Always do strong CAS */
89 /* Ignore memory ordering for failure, memory order for
90 * success must be stronger or equal
92 RTE_SET_USED(failure);
93 /* Find invalid memory order */
94 RTE_ASSERT(success == __ATOMIC_RELAXED ||
95 success == __ATOMIC_ACQUIRE ||
96 success == __ATOMIC_RELEASE ||
97 success == __ATOMIC_ACQ_REL ||
98 success == __ATOMIC_SEQ_CST);
100 rte_int128_t expected = *exp;
101 rte_int128_t desired = *src;
/* Fast path: an LSE caspX helper selected by the success ordering
 * (SEQ_CST falls through to the acq_rel helper).
 */
104 #if defined(__ARM_FEATURE_ATOMICS) || defined(RTE_ARM_FEATURE_ATOMICS)
105 if (success == __ATOMIC_RELAXED)
106 old = __cas_128_relaxed(dst, expected, desired);
107 else if (success == __ATOMIC_ACQUIRE)
108 old = __cas_128_acquire(dst, expected, desired);
109 else if (success == __ATOMIC_RELEASE)
110 old = __cas_128_release(dst, expected, desired);
112 old = __cas_128_acq_rel(dst, expected, desired);
/* LL/SC fallback: split 'success' into an acquire half for the
 * exclusive load and a release half for the exclusive store.
 */
114 #define __HAS_ACQ(mo) ((mo) != __ATOMIC_RELAXED && (mo) != __ATOMIC_RELEASE)
115 #define __HAS_RLS(mo) ((mo) == __ATOMIC_RELEASE || (mo) == __ATOMIC_ACQ_REL || \
116 (mo) == __ATOMIC_SEQ_CST)
118 int ldx_mo = __HAS_ACQ(success) ? __ATOMIC_ACQUIRE : __ATOMIC_RELAXED;
119 int stx_mo = __HAS_RLS(success) ? __ATOMIC_RELEASE : __ATOMIC_RELAXED;
126 /* ldx128 can not guarantee atomic,
127 * Must write back src or old to verify atomicity of ldx128;
/* Exclusive-pair load (ldxp/ldaxp) of *src into dst.
 * NOTE(review): the asm constraint list is partially elided here.
 */
131 #define __LOAD_128(op_string, src, dst) { \
133 op_string " %0, %1, %2" \
134 : "=&r" (dst.val[0]), \
136 : "Q" (src->val[0]) \
139 if (ldx_mo == __ATOMIC_RELAXED)
140 __LOAD_128("ldxp", dst, old)
142 __LOAD_128("ldaxp", dst, old)
/* Exclusive-pair store (stxp/stlxp); 'ret' receives the store-exclusive
 * status word -- non-zero means the exclusive pair was lost and the
 * enclosing loop must retry.
 * NOTE(review): the asm constraint list is partially elided here.
 */
146 #define __STORE_128(op_string, dst, src, ret) { \
148 op_string " %w0, %1, %2, %3" \
150 : "r" (src.val[0]), \
155 if (likely(old.int128 == expected.int128)) {
156 if (stx_mo == __ATOMIC_RELAXED)
157 __STORE_128("stxp", dst, desired, ret)
159 __STORE_128("stlxp", dst, desired, ret)
161 /* In the failure case (since 'weak' is ignored and only
162 * weak == 0 is implemented), expected should contain
163 * the atomically read value of dst. This means, 'old'
164 * needs to be stored back to ensure it was read
167 if (stx_mo == __ATOMIC_RELAXED)
168 __STORE_128("stxp", dst, old, ret)
170 __STORE_128("stlxp", dst, old, ret)
/* Retry until the store-exclusive succeeds (LL/SC path only). */
175 } while (unlikely(ret));
178 /* Unconditionally updating expected removes an 'if' statement.
179 * expected should already be in register if not in the cache.
/* The exchange happened iff the observed value matched the expected. */
183 return (old.int128 == expected.int128);
190 #endif /* _RTE_ATOMIC_ARM64_H_ */