/*------------------------ 128 bit atomic operations -------------------------*/
#if defined(__ARM_FEATURE_ATOMICS) || defined(RTE_ARM_FEATURE_ATOMICS)

/*
 * Generate a 128-bit compare-and-swap helper around the ARMv8.1-A LSE
 * CASP instruction family.
 *
 * cas_op_name: name of the generated static function.
 * op_string:   mnemonic selecting the memory ordering
 *              ("casp" / "caspa" / "caspl" / "caspal").
 *
 * The generated function atomically compares *dst against *old; on match
 * it stores 'updated' into *dst. In all cases *old is overwritten with
 * the value read from *dst (standard CAS "expected" in/out contract).
 */
#if defined(RTE_CC_CLANG)
/*
 * Clang variant: CASP requires its register pairs to start at an
 * even-numbered register, which cannot be expressed through operand
 * constraints alone. Pin the operands to x0/x1 (old, read-modify) and
 * x2/x3 (desired) explicitly, and keep the function out-of-line
 * (__rte_noinline) so the register pinning is not disturbed by the
 * surrounding code. The ".arch" directive enables the LSE encoding
 * without requiring +lse on the whole translation unit.
 */
#define __ATOMIC128_CAS_OP(cas_op_name, op_string)                          \
static __rte_noinline void                                                  \
cas_op_name(rte_int128_t *dst, rte_int128_t *old, rte_int128_t updated)     \
{                                                                           \
	register uint64_t x0 __asm("x0") = (uint64_t)old->val[0];           \
	register uint64_t x1 __asm("x1") = (uint64_t)old->val[1];           \
	register uint64_t x2 __asm("x2") = (uint64_t)updated.val[0];        \
	register uint64_t x3 __asm("x3") = (uint64_t)updated.val[1];        \
	asm volatile(                                                       \
		".arch armv8-a+lse\n"                                       \
		op_string " %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"   \
		: [old0] "+r" (x0),                                         \
		  [old1] "+r" (x1)                                          \
		: [upd0] "r" (x2),                                          \
		  [upd1] "r" (x3),                                          \
		  [dst] "r" (dst)                                           \
		: "memory");                                                \
	old->val[0] = x0;                                                   \
	old->val[1] = x1;                                                   \
}
#else
/*
 * GCC variant: the %H operand modifier names the odd register of a
 * 128-bit register pair, so the compiler can allocate the (even-based)
 * pairs itself and the helper can be inlined.
 */
#define __ATOMIC128_CAS_OP(cas_op_name, op_string)                          \
static __rte_always_inline void                                             \
cas_op_name(rte_int128_t *dst, rte_int128_t *old, rte_int128_t updated)     \
{                                                                           \
	asm volatile(                                                       \
		op_string " %[old], %H[old], %[upd], %H[upd], [%[dst]]"     \
		: [old] "+r" (old->int128)                                  \
		: [upd] "r" (updated.int128), [dst] "r" (dst)               \
		: "memory");                                                \
}
#endif

/* One helper per memory ordering: relaxed / acquire / release / acq_rel. */
__ATOMIC128_CAS_OP(__cas_128_relaxed, "casp")
__ATOMIC128_CAS_OP(__cas_128_acquire, "caspa")
__ATOMIC128_CAS_OP(__cas_128_release, "caspl")
__ATOMIC128_CAS_OP(__cas_128_acq_rel, "caspal")

#undef __ATOMIC128_CAS_OP
#endif