+ /*
+  * NOTE(review): fragment of the eBPF opcode dispatch switch of an
+  * arm64 JIT. ctx, ins, i, op, dst, src, off, imm, is64, u64 and the
+  * tmp registers are set up by the enclosing translation loop, which
+  * is outside this chunk — confirm against the full function.
+  */
+ /* dst |= src */
+ case (BPF_ALU | BPF_OR | BPF_X):
+ case (EBPF_ALU64 | BPF_OR | BPF_X):
+ emit_or(ctx, is64, dst, src);
+ break;
+ /* dst |= imm */
+ case (BPF_ALU | BPF_OR | BPF_K):
+ case (EBPF_ALU64 | BPF_OR | BPF_K):
+ /* no immediate form used: imm is materialized into a temp reg */
+ emit_mov_imm(ctx, is64, tmp1, imm);
+ emit_or(ctx, is64, dst, tmp1);
+ break;
+ /* dst &= src */
+ case (BPF_ALU | BPF_AND | BPF_X):
+ case (EBPF_ALU64 | BPF_AND | BPF_X):
+ emit_and(ctx, is64, dst, src);
+ break;
+ /* dst &= imm */
+ case (BPF_ALU | BPF_AND | BPF_K):
+ case (EBPF_ALU64 | BPF_AND | BPF_K):
+ emit_mov_imm(ctx, is64, tmp1, imm);
+ emit_and(ctx, is64, dst, tmp1);
+ break;
+ /* dst ^= src */
+ case (BPF_ALU | BPF_XOR | BPF_X):
+ case (EBPF_ALU64 | BPF_XOR | BPF_X):
+ emit_xor(ctx, is64, dst, src);
+ break;
+ /* dst ^= imm */
+ case (BPF_ALU | BPF_XOR | BPF_K):
+ case (EBPF_ALU64 | BPF_XOR | BPF_K):
+ emit_mov_imm(ctx, is64, tmp1, imm);
+ emit_xor(ctx, is64, dst, tmp1);
+ break;
+ /* dst = -dst */
+ case (BPF_ALU | BPF_NEG):
+ case (EBPF_ALU64 | BPF_NEG):
+ emit_neg(ctx, is64, dst);
+ break;
+ /* dst <<= src */
+ case BPF_ALU | BPF_LSH | BPF_X:
+ case EBPF_ALU64 | BPF_LSH | BPF_X:
+ emit_lslv(ctx, is64, dst, src);
+ break;
+ /* dst <<= imm */
+ case BPF_ALU | BPF_LSH | BPF_K:
+ case EBPF_ALU64 | BPF_LSH | BPF_K:
+ emit_lsl(ctx, is64, dst, imm);
+ break;
+ /* dst >>= src (logical) */
+ case BPF_ALU | BPF_RSH | BPF_X:
+ case EBPF_ALU64 | BPF_RSH | BPF_X:
+ emit_lsrv(ctx, is64, dst, src);
+ break;
+ /* dst >>= imm (logical) */
+ case BPF_ALU | BPF_RSH | BPF_K:
+ case EBPF_ALU64 | BPF_RSH | BPF_K:
+ emit_lsr(ctx, is64, dst, imm);
+ break;
+ /* dst >>= src (arithmetic) */
+ case BPF_ALU | EBPF_ARSH | BPF_X:
+ case EBPF_ALU64 | EBPF_ARSH | BPF_X:
+ emit_asrv(ctx, is64, dst, src);
+ break;
+ /* dst >>= imm (arithmetic) */
+ case BPF_ALU | EBPF_ARSH | BPF_K:
+ case EBPF_ALU64 | EBPF_ARSH | BPF_K:
+ emit_asr(ctx, is64, dst, imm);
+ break;
+ /* dst = be##imm(dst) — imm selects the width (16/32/64) */
+ case (BPF_ALU | EBPF_END | EBPF_TO_BE):
+ emit_be(ctx, dst, imm);
+ break;
+ /* dst = le##imm(dst) */
+ case (BPF_ALU | EBPF_END | EBPF_TO_LE):
+ emit_le(ctx, dst, imm);
+ break;
+ /* dst = *(size *) (src + off) */
+ case (BPF_LDX | BPF_MEM | BPF_B):
+ case (BPF_LDX | BPF_MEM | BPF_H):
+ case (BPF_LDX | BPF_MEM | BPF_W):
+ case (BPF_LDX | BPF_MEM | EBPF_DW):
+ /* signed 16-bit off goes into a temp for reg+reg addressing */
+ emit_mov_imm(ctx, 1, tmp1, off);
+ emit_ldr(ctx, BPF_SIZE(op), dst, src, tmp1);
+ break;
+ /* dst = imm64 */
+ case (BPF_LD | BPF_IMM | EBPF_DW):
+ /* two-slot insn: high word is in ins[1].imm; i++ skips slot 2 */
+ u64 = ((uint64_t)ins[1].imm << 32) | (uint32_t)imm;
+ emit_mov_imm(ctx, 1, dst, u64);
+ i++;
+ break;
+ /* *(size *)(dst + off) = src */
+ case (BPF_STX | BPF_MEM | BPF_B):
+ case (BPF_STX | BPF_MEM | BPF_H):
+ case (BPF_STX | BPF_MEM | BPF_W):
+ case (BPF_STX | BPF_MEM | EBPF_DW):
+ emit_mov_imm(ctx, 1, tmp1, off);
+ emit_str(ctx, BPF_SIZE(op), src, dst, tmp1);
+ break;
+ /* *(size *)(dst + off) = imm */
+ case (BPF_ST | BPF_MEM | BPF_B):
+ case (BPF_ST | BPF_MEM | BPF_H):
+ case (BPF_ST | BPF_MEM | BPF_W):
+ case (BPF_ST | BPF_MEM | EBPF_DW):
+ /* needs two temps: one for the value, one for the offset */
+ emit_mov_imm(ctx, 1, tmp1, imm);
+ emit_mov_imm(ctx, 1, tmp2, off);
+ emit_str(ctx, BPF_SIZE(op), tmp1, dst, tmp2);
+ break;
+ /* STX XADD: lock *(size *)(dst + off) += src */
+ case (BPF_STX | EBPF_XADD | BPF_W):
+ case (BPF_STX | EBPF_XADD | EBPF_DW):
+ /* three scratch regs — presumably for an LL/SC retry loop;
+  * confirm in emit_xadd.
+  */
+ emit_xadd(ctx, op, tmp1, tmp2, tmp3, dst, off, src);
+ break;
+ /* PC += off */
+ case (BPF_JMP | BPF_JA):
+ emit_b(ctx, jump_offset_get(ctx, i, off));
+ break;
+ /* PC += off if dst COND imm */
+ case (BPF_JMP | BPF_JEQ | BPF_K):
+ case (BPF_JMP | EBPF_JNE | BPF_K):
+ case (BPF_JMP | BPF_JGT | BPF_K):
+ case (BPF_JMP | EBPF_JLT | BPF_K):
+ case (BPF_JMP | BPF_JGE | BPF_K):
+ case (BPF_JMP | EBPF_JLE | BPF_K):
+ case (BPF_JMP | EBPF_JSGT | BPF_K):
+ case (BPF_JMP | EBPF_JSLT | BPF_K):
+ case (BPF_JMP | EBPF_JSGE | BPF_K):
+ case (BPF_JMP | EBPF_JSLE | BPF_K):
+ /* JMP-class compares are always full 64-bit: is64 is literal 1 */
+ emit_mov_imm(ctx, 1, tmp1, imm);
+ emit_cmp(ctx, 1, dst, tmp1);
+ emit_branch(ctx, op, i, off);
+ break;
+ case (BPF_JMP | BPF_JSET | BPF_K):
+ emit_mov_imm(ctx, 1, tmp1, imm);
+ emit_tst(ctx, 1, dst, tmp1);
+ emit_branch(ctx, op, i, off);
+ break;
+ /* PC += off if dst COND src */
+ case (BPF_JMP | BPF_JEQ | BPF_X):
+ case (BPF_JMP | EBPF_JNE | BPF_X):
+ case (BPF_JMP | BPF_JGT | BPF_X):
+ case (BPF_JMP | EBPF_JLT | BPF_X):
+ case (BPF_JMP | BPF_JGE | BPF_X):
+ case (BPF_JMP | EBPF_JLE | BPF_X):
+ case (BPF_JMP | EBPF_JSGT | BPF_X):
+ case (BPF_JMP | EBPF_JSLT | BPF_X):
+ case (BPF_JMP | EBPF_JSGE | BPF_X):
+ case (BPF_JMP | EBPF_JSLE | BPF_X):
+ emit_cmp(ctx, 1, dst, src);
+ emit_branch(ctx, op, i, off);
+ break;
+ case (BPF_JMP | BPF_JSET | BPF_X):
+ emit_tst(ctx, 1, dst, src);
+ emit_branch(ctx, op, i, off);
+ break;
+ /* Call imm — target address comes from the external-symbol
+  * (xsym) table, indexed by the instruction's imm field.
+  */
+ case (BPF_JMP | EBPF_CALL):
+ emit_call(ctx, tmp1, bpf->prm.xsym[ins->imm].func.val);
+ break;