#define LOONGARCH_BPF_FENTRY_NBYTES (LOONGARCH_LONG_JUMP_NINSNS * 4)
#define REG_TCC LOONGARCH_GPR_A6
+#define REG_ARENA LOONGARCH_GPR_S6 /* For storing arena_vm_start */
#define BPF_TAIL_CALL_CNT_PTR_STACK_OFF(stack) (round_up(stack, 16) - 80)
static const int regmap[] = {
/* To store tcc and tcc_ptr */
stack_adjust += sizeof(long) * 2;
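+ /* Extra slot to spill REG_ARENA when the program uses an arena */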
+ if (ctx->arena_vm_start)
+ stack_adjust += sizeof(long);
+
stack_adjust = round_up(stack_adjust, 16);
stack_adjust += bpf_stack_adjust;
store_offset -= sizeof(long);
emit_insn(ctx, std, LOONGARCH_GPR_S5, LOONGARCH_GPR_SP, store_offset);
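+ /* Save REG_ARENA (callee-saved S6); it is reloaded in the epilogue */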
+ if (ctx->arena_vm_start) {
+ store_offset -= sizeof(long);
+ emit_insn(ctx, std, REG_ARENA, LOONGARCH_GPR_SP, store_offset);
+ }
+
prepare_bpf_tail_call_cnt(ctx, &store_offset);
emit_insn(ctx, addid, LOONGARCH_GPR_FP, LOONGARCH_GPR_SP, stack_adjust);
emit_insn(ctx, addid, regmap[BPF_REG_FP], LOONGARCH_GPR_SP, bpf_stack_adjust);
ctx->stack_size = stack_adjust;
+
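+ /* Keep the arena kernel base address in REG_ARENA so PROBE_MEM32 accesses can rebase on it */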
+ if (ctx->arena_vm_start)
+ move_imm(ctx, REG_ARENA, ctx->arena_vm_start, false);
}
static void __build_epilogue(struct jit_ctx *ctx, bool is_tail_call)
load_offset -= sizeof(long);
emit_insn(ctx, ldd, LOONGARCH_GPR_S5, LOONGARCH_GPR_SP, load_offset);
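+ /* Restore REG_ARENA spilled by the prologue */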
+ if (ctx->arena_vm_start) {
+ load_offset -= sizeof(long);
+ emit_insn(ctx, ldd, REG_ARENA, LOONGARCH_GPR_SP, load_offset);
+ }
+
/*
* When pushing onto the stack, tcc is stored first and then tcc_ptr.
* When popping from the stack, tcc_ptr is restored first and then tcc.
#define BPF_FIXUP_REG_MASK GENMASK(31, 27)
#define BPF_FIXUP_OFFSET_MASK GENMASK(26, 0)
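+/* Register 0 is the hard-wired $zero on LoongArch, so it doubles as a "nothing to clear" marker */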
+#define REG_DONT_CLEAR_MARKER 0
bool ex_handler_bpf(const struct exception_table_entry *ex,
struct pt_regs *regs)
int dst_reg = FIELD_GET(BPF_FIXUP_REG_MASK, ex->fixup);
off_t offset = FIELD_GET(BPF_FIXUP_OFFSET_MASK, ex->fixup);
- regs->regs[dst_reg] = 0;
+ if (dst_reg != REG_DONT_CLEAR_MARKER)
+ regs->regs[dst_reg] = 0;
regs->csr_era = (unsigned long)&ex->fixup - offset;
return true;
return 0;
if (BPF_MODE(insn->code) != BPF_PROBE_MEM &&
- BPF_MODE(insn->code) != BPF_PROBE_MEMSX)
+ BPF_MODE(insn->code) != BPF_PROBE_MEMSX &&
+ BPF_MODE(insn->code) != BPF_PROBE_MEM32)
return 0;
if (WARN_ON_ONCE(ctx->num_exentries >= ctx->prog->aux->num_exentries))
const u8 cond = BPF_OP(code);
const u8 t1 = LOONGARCH_GPR_T1;
const u8 t2 = LOONGARCH_GPR_T2;
- const u8 src = regmap[insn->src_reg];
- const u8 dst = regmap[insn->dst_reg];
+ const u8 t3 = LOONGARCH_GPR_T3;
+ u8 src = regmap[insn->src_reg];
+ u8 dst = regmap[insn->dst_reg];
const s16 off = insn->off;
const s32 imm = insn->imm;
const bool is32 = BPF_CLASS(insn->code) == BPF_ALU || BPF_CLASS(insn->code) == BPF_JMP32;
case BPF_LDX | BPF_PROBE_MEMSX | BPF_B:
case BPF_LDX | BPF_PROBE_MEMSX | BPF_H:
case BPF_LDX | BPF_PROBE_MEMSX | BPF_W:
- sign_extend = BPF_MODE(insn->code) == BPF_MEMSX ||
- BPF_MODE(insn->code) == BPF_PROBE_MEMSX;
+ /* LDX | PROBE_MEM32: dst = *(unsigned size *)(src + REG_ARENA + off) */
+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_B:
+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_H:
+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_W:
+ case BPF_LDX | BPF_PROBE_MEM32 | BPF_DW:
+ sign_extend = BPF_MODE(code) == BPF_MEMSX ||
+ BPF_MODE(code) == BPF_PROBE_MEMSX;
+
+ if (BPF_MODE(code) == BPF_PROBE_MEM32) {
+ emit_insn(ctx, addd, t2, src, REG_ARENA);
+ src = t2;
+ }
+
switch (BPF_SIZE(code)) {
case BPF_B:
if (is_signed_imm12(off)) {
case BPF_ST | BPF_MEM | BPF_H:
case BPF_ST | BPF_MEM | BPF_W:
case BPF_ST | BPF_MEM | BPF_DW:
+ /* ST | PROBE_MEM32: *(size *)(dst + REG_ARENA + off) = imm */
+ case BPF_ST | BPF_PROBE_MEM32 | BPF_B:
+ case BPF_ST | BPF_PROBE_MEM32 | BPF_H:
+ case BPF_ST | BPF_PROBE_MEM32 | BPF_W:
+ case BPF_ST | BPF_PROBE_MEM32 | BPF_DW:
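+ /* Rebase dst into the arena; t3 keeps t1 (used for the immediate below) free */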
+ if (BPF_MODE(code) == BPF_PROBE_MEM32) {
+ emit_insn(ctx, addd, t3, dst, REG_ARENA);
+ dst = t3;
+ }
+
switch (BPF_SIZE(code)) {
case BPF_B:
move_imm(ctx, t1, imm, is32);
}
break;
}
+
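+ /* A faulting arena store has no destination register to zero; only csr_era needs fixing up */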
+ ret = add_exception_handler(insn, ctx, REG_DONT_CLEAR_MARKER);
+ if (ret)
+ return ret;
break;
/* *(size *)(dst + off) = src */
case BPF_STX | BPF_MEM | BPF_H:
case BPF_STX | BPF_MEM | BPF_W:
case BPF_STX | BPF_MEM | BPF_DW:
+ /* STX | PROBE_MEM32: *(size *)(dst + REG_ARENA + off) = src */
+ case BPF_STX | BPF_PROBE_MEM32 | BPF_B:
+ case BPF_STX | BPF_PROBE_MEM32 | BPF_H:
+ case BPF_STX | BPF_PROBE_MEM32 | BPF_W:
+ case BPF_STX | BPF_PROBE_MEM32 | BPF_DW:
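+ /* Rebase dst into the arena; src still holds the value to store */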
+ if (BPF_MODE(code) == BPF_PROBE_MEM32) {
+ emit_insn(ctx, addd, t2, dst, REG_ARENA);
+ dst = t2;
+ }
+
switch (BPF_SIZE(code)) {
case BPF_B:
if (is_signed_imm12(off)) {
}
break;
}
+
+ ret = add_exception_handler(insn, ctx, REG_DONT_CLEAR_MARKER);
+ if (ret)
+ return ret;
break;
case BPF_STX | BPF_ATOMIC | BPF_W:
memset(&ctx, 0, sizeof(ctx));
ctx.prog = prog;
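+ /* bpf_arena_get_kern_vm_start() is 0 when no arena is attached, so the arena paths above stay disabled */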
+ ctx.arena_vm_start = bpf_arena_get_kern_vm_start(prog->aux->arena);
ctx.offset = kvcalloc(prog->len + 1, sizeof(u32), GFP_KERNEL);
if (ctx.offset == NULL) {