/*
 * Emit STADD <Ws/Xs>, [<Xn>]: atomically add rs to the memory word
 * addressed by rn (ARMv8.1 LSE atomics, no destination register).
 */
static void
emit_stadd(struct a64_jit_ctx *ctx, bool is64, uint8_t rs, uint8_t rn)
{
	/* Base opcode of the 32-bit STADD; bit 30 (sf) selects 64-bit. */
	uint32_t insn = 0xb820001f;

	if (is64)
		insn |= 1u << 30;
	insn |= (uint32_t)rs << 16;
	insn |= (uint32_t)rn << 5;

	emit_insn(ctx, insn, check_reg(rs) || check_reg(rn));
}
+
/*
 * Emit LDXR <Wt/Xt>, [<Xn>]: load-exclusive from the address in rn,
 * opening an exclusive monitor for a following STXR.
 */
static void
emit_ldxr(struct a64_jit_ctx *ctx, bool is64, uint8_t rt, uint8_t rn)
{
	/* Base opcode of the 32-bit LDXR; bit 30 (sf) selects 64-bit. */
	uint32_t insn = 0x885f7c00;

	if (is64)
		insn |= 1u << 30;
	insn |= (uint32_t)rn << 5;
	insn |= rt;

	emit_insn(ctx, insn, check_reg(rt) || check_reg(rn));
}
+
/*
 * Emit STXR <Ws>, <Wt/Xt>, [<Xn>]: store-exclusive of rt to the address
 * in rn; rs receives 0 on success, 1 if the exclusive monitor was lost.
 */
static void
emit_stxr(struct a64_jit_ctx *ctx, bool is64, uint8_t rs, uint8_t rt,
	uint8_t rn)
{
	/* Base opcode of the 32-bit STXR; bit 30 (sf) selects 64-bit. */
	uint32_t insn = 0x88007c00;

	if (is64)
		insn |= 1u << 30;
	insn |= (uint32_t)rs << 16;
	insn |= (uint32_t)rn << 5;
	insn |= rt;

	emit_insn(ctx, insn, check_reg(rs) || check_reg(rt) || check_reg(rn));
}
+
/*
 * Return non-zero when the build target supports ARMv8.1 LSE atomics
 * (e.g. STADD). Decided entirely at compile time from the compiler's
 * __ARM_FEATURE_ATOMICS macro or the RTE_ARM_FEATURE_ATOMICS override.
 */
static int
has_atomics(void)
{
#if defined(__ARM_FEATURE_ATOMICS) || defined(RTE_ARM_FEATURE_ATOMICS)
	return 1;
#else
	return 0;
#endif
}
+
+static void
+emit_xadd(struct a64_jit_ctx *ctx, uint8_t op, uint8_t tmp1, uint8_t tmp2,
+ uint8_t tmp3, uint8_t dst, int16_t off, uint8_t src)
+{
+ bool is64 = (BPF_SIZE(op) == EBPF_DW);
+ uint8_t rn;
+
+ if (off) {
+ emit_mov_imm(ctx, 1, tmp1, off);
+ emit_add(ctx, 1, tmp1, dst);
+ rn = tmp1;
+ } else {
+ rn = dst;
+ }
+
+ if (has_atomics()) {
+ emit_stadd(ctx, is64, src, rn);
+ } else {
+ emit_ldxr(ctx, is64, tmp2, rn);
+ emit_add(ctx, is64, tmp2, src);
+ emit_stxr(ctx, is64, tmp3, tmp2, rn);
+ emit_cbnz(ctx, is64, tmp3, -3);
+ }
+}
+
/*
 * CMP is the alias of SUBS with Rd = XZR, TST the alias of ANDS with
 * Rd = XZR; XZR is register 31, so the low five bits must be 0b11111
 * (0x1f). 0x0f would encode Rd = 15 and silently clobber w15/x15.
 */
#define A64_CMP 0x6b00001f
#define A64_TST 0x6a00001f
/*
 * Emit a flag-setting compare: CMP/TST <n>, <m>. Only the condition
 * flags are updated; the arithmetic result is discarded (Rd = XZR).
 */
static void
emit_cmp_tst(struct a64_jit_ctx *ctx, bool is64, uint8_t rn, uint8_t rm,
	uint32_t opc)
{
	uint32_t insn;

	insn = opc;
	/* Bit 31 (sf) selects the 64-bit form; cast avoids a signed shift
	 * into the sign bit, which is undefined behavior for int. */
	insn |= (uint32_t)(!!is64) << 31;
	insn |= (uint32_t)rm << 16;
	insn |= (uint32_t)rn << 5;

	emit_insn(ctx, insn, check_reg(rn) || check_reg(rm));
}
+
/* Emit CMP <n>, <m>: subtract rm from rn, set flags, discard result. */
static void
emit_cmp(struct a64_jit_ctx *ctx, bool is64, uint8_t rn, uint8_t rm)
{
	emit_cmp_tst(ctx, is64, rn, rm, A64_CMP);
}
+
/* Emit TST <n>, <m>: AND rn with rm, set flags, discard result. */
static void
emit_tst(struct a64_jit_ctx *ctx, bool is64, uint8_t rn, uint8_t rm)
{
	emit_cmp_tst(ctx, is64, rn, rm, A64_TST);
}
+
/*
 * Emit B.cond <label>: PC-relative conditional branch. imm19 is the
 * signed offset in instructions, masked to the 19-bit immediate field.
 */
static void
emit_b_cond(struct a64_jit_ctx *ctx, uint8_t cond, int32_t imm19)
{
	/* 0x15 << 26 == 0x54000000, the B.cond base opcode. */
	uint32_t insn = 0x15 << 26;

	insn |= mask_imm(19, imm19) << 5;
	insn |= cond;

	emit_insn(ctx, insn, check_cond(cond) || check_imm(19, imm19));
}
+
/*
 * Emit the conditional branch for eBPF jump instruction i, translating
 * the eBPF condition in op and the eBPF offset off into an a64 B.cond.
 */
static void
emit_branch(struct a64_jit_ctx *ctx, uint8_t op, uint32_t i, int16_t off)
{
	int32_t rel;

	/* Register this instruction so its offset can be fixed up later. */
	jump_offset_to_branch_update(ctx, i);
	rel = jump_offset_get(ctx, i, off);
	emit_b_cond(ctx, ebpf_to_a64_cond(op), rel);
}
+