// SPDX-License-Identifier: GPL-2.0
/* BPF JIT compiler for RV64G
 *
 * Copyright(c) 2019 Björn Töpel <bjorn.topel@gmail.com>
 *
 */

#include <linux/bitfield.h>
#include <linux/bpf.h>
#include <linux/filter.h>
#include "bpf_jit.h"

#define RV_REG_TCC RV_REG_A6
#define RV_REG_TCC_SAVED RV_REG_S6 /* Store A6 in S6 if the program makes calls */

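/* BPF-to-RISC-V register map. Function arguments and the return value
 * live in caller-saved a-registers; BPF_REG_6..BPF_REG_9 and the BPF
 * frame pointer are backed by callee-saved s-registers so that they
 * survive helper calls.
 */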
static const int regmap[] = {
	[BPF_REG_0] = RV_REG_A5,
	[BPF_REG_1] = RV_REG_A0,
	[BPF_REG_2] = RV_REG_A1,
	[BPF_REG_3] = RV_REG_A2,
	[BPF_REG_4] = RV_REG_A3,
	[BPF_REG_5] = RV_REG_A4,
	[BPF_REG_6] = RV_REG_S1,
	[BPF_REG_7] = RV_REG_S2,
	[BPF_REG_8] = RV_REG_S3,
	[BPF_REG_9] = RV_REG_S4,
	[BPF_REG_FP] = RV_REG_S5,
	[BPF_REG_AX] = RV_REG_T0,
};

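/* Offsets into struct pt_regs, indexed by the RISC-V register numbers
 * above. Used by ex_handler_bpf() to zero the destination register of
 * a faulting BPF_PROBE_MEM load.
 */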
static const int pt_regmap[] = {
	[RV_REG_A0] = offsetof(struct pt_regs, a0),
	[RV_REG_A1] = offsetof(struct pt_regs, a1),
	[RV_REG_A2] = offsetof(struct pt_regs, a2),
	[RV_REG_A3] = offsetof(struct pt_regs, a3),
	[RV_REG_A4] = offsetof(struct pt_regs, a4),
	[RV_REG_A5] = offsetof(struct pt_regs, a5),
	[RV_REG_S1] = offsetof(struct pt_regs, s1),
	[RV_REG_S2] = offsetof(struct pt_regs, s2),
	[RV_REG_S3] = offsetof(struct pt_regs, s3),
	[RV_REG_S4] = offsetof(struct pt_regs, s4),
	[RV_REG_S5] = offsetof(struct pt_regs, s5),
	[RV_REG_T0] = offsetof(struct pt_regs, t0),
};

enum {
	RV_CTX_F_SEEN_TAIL_CALL = 0,
	RV_CTX_F_SEEN_CALL = RV_REG_RA,
	RV_CTX_F_SEEN_S1 = RV_REG_S1,
	RV_CTX_F_SEEN_S2 = RV_REG_S2,
	RV_CTX_F_SEEN_S3 = RV_REG_S3,
	RV_CTX_F_SEEN_S4 = RV_REG_S4,
	RV_CTX_F_SEEN_S5 = RV_REG_S5,
	RV_CTX_F_SEEN_S6 = RV_REG_S6,
};

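/* The "seen" flag bits above deliberately reuse the RISC-V register
 * numbers, so translating a BPF register below can mark the backing
 * callee-saved register as used with a single __set_bit().
 */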
static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx)
{
	u8 reg = regmap[bpf_reg];

	switch (reg) {
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		__set_bit(reg, &ctx->flags);
		break;
	}
	return reg;
}

static bool seen_reg(int reg, struct rv_jit_context *ctx)
{
	switch (reg) {
	case RV_CTX_F_SEEN_CALL:
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		return test_bit(reg, &ctx->flags);
	}
	return false;
}

static void mark_fp(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_S5, &ctx->flags);
}

static void mark_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static bool seen_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static void mark_tail_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}

static bool seen_tail_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}

static u8 rv_tail_call_reg(struct rv_jit_context *ctx)
{
	mark_tail_call(ctx);

	if (seen_call(ctx)) {
		__set_bit(RV_CTX_F_SEEN_S6, &ctx->flags);
		return RV_REG_S6;
	}
	return RV_REG_A6;
}

static bool is_32b_int(s64 val)
{
	return -(1L << 31) <= val && val < (1L << 31);
}

static bool in_auipc_jalr_range(s64 val)
{
	/*
	 * auipc+jalr can reach any signed PC-relative offset in the range
	 * [-2^31 - 2^11, 2^31 - 2^11).
	 */
	return (-(1L << 31) - (1L << 11)) <= val &&
	       val < ((1L << 31) - (1L << 11));
}

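/* Materialise a 64-bit immediate. Example: 0x12345678 fits in 32 bits,
 * so it becomes lui rd, 0x12345 followed by addiw rd, rd, 0x678. Wider
 * values are built recursively: load the upper bits, shift left, then
 * add in the low 12 bits.
 */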
static void emit_imm(u8 rd, s64 val, struct rv_jit_context *ctx)
{
	/* Note that the immediate from the add is sign-extended,
	 * which means that we need to compensate this by adding 2^12,
	 * when the 12th bit is set. A simpler way of doing this, and
	 * getting rid of the check, is to just add 2**11 before the
	 * shift. The "Loading a 32-Bit constant" example from the
	 * "Computer Organization and Design, RISC-V edition" book by
	 * Patterson/Hennessy highlights this fact.
	 *
	 * This also means that we need to process LSB to MSB.
	 */
	s64 upper = (val + (1 << 11)) >> 12;
	/* Sign-extend lower 12 bits to 64 bits since immediates for li, addiw,
	 * and addi are signed and RVC checks will perform signed comparisons.
	 */
	s64 lower = ((val & 0xfff) << 52) >> 52;
	int shift;

	if (is_32b_int(val)) {
		if (upper)
			emit_lui(rd, upper, ctx);

		if (!upper) {
			emit_li(rd, lower, ctx);
			return;
		}

		emit_addiw(rd, rd, lower, ctx);
		return;
	}

	shift = __ffs(upper);
	upper >>= shift;
	shift += 12;

	emit_imm(rd, upper, ctx);

	emit_slli(rd, rd, shift, ctx);
	if (lower)
		emit_addi(rd, rd, lower, ctx);
}

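/* Unwind the stack frame set up in bpf_jit_build_prologue(). For a
 * normal exit this returns via ra; for a tail call it jumps through
 * t3, skipping the 4-byte TCC initialisation at the target's entry.
 */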
static void __build_epilogue(bool is_tail_call, struct rv_jit_context *ctx)
{
	int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8;

	if (seen_reg(RV_REG_RA, ctx)) {
		emit_ld(RV_REG_RA, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	emit_ld(RV_REG_FP, store_offset, RV_REG_SP, ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit_ld(RV_REG_S1, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit_ld(RV_REG_S2, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit_ld(RV_REG_S3, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit_ld(RV_REG_S4, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit_ld(RV_REG_S5, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit_ld(RV_REG_S6, store_offset, RV_REG_SP, ctx);
		store_offset -= 8;
	}

	emit_addi(RV_REG_SP, RV_REG_SP, stack_adjust, ctx);
	/* Set return value. */
	if (!is_tail_call)
		emit_mv(RV_REG_A0, RV_REG_A5, ctx);
	emit_jalr(RV_REG_ZERO, is_tail_call ? RV_REG_T3 : RV_REG_RA,
		  is_tail_call ? 4 : 0, /* skip TCC init */
		  ctx);
}

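/* The branch encoding helpers below take their offset in 2-byte
 * halfwords (the low bit of a branch immediate is implicitly zero),
 * hence the rvoff >> 1.
 */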
static void emit_bcc(u8 cond, u8 rd, u8 rs, int rvoff,
		     struct rv_jit_context *ctx)
{
	switch (cond) {
	case BPF_JEQ:
		emit(rv_beq(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JGT:
		emit(rv_bltu(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JLT:
		emit(rv_bltu(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JGE:
		emit(rv_bgeu(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JLE:
		emit(rv_bgeu(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JNE:
		emit(rv_bne(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSGT:
		emit(rv_blt(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JSLT:
		emit(rv_blt(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSGE:
		emit(rv_bge(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSLE:
		emit(rv_bge(rs, rd, rvoff >> 1), ctx);
	}
}

static void emit_branch(u8 cond, u8 rd, u8 rs, int rvoff,
			struct rv_jit_context *ctx)
{
	s64 upper, lower;

	if (is_13b_int(rvoff)) {
		emit_bcc(cond, rd, rs, rvoff, ctx);
		return;
	}

	/* Adjust for jal */
	rvoff -= 4;

	/* Invert the condition and branch over the far jump instead. */
	cond = invert_bpf_cond(cond);
	if (is_21b_int(rvoff)) {
		emit_bcc(cond, rd, rs, 8, ctx);
		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
		return;
	}

	/* 32b No need for an additional rvoff adjustment, since we
	 * get that from the auipc at PC', where PC = PC' + 4.
	 */
	upper = (rvoff + (1 << 11)) >> 12;
	lower = rvoff & 0xfff;

	emit_bcc(cond, rd, rs, 12, ctx);
	emit(rv_auipc(RV_REG_T1, upper), ctx);
	emit(rv_jalr(RV_REG_ZERO, RV_REG_T1, lower), ctx);
}

static void emit_zext_32(u8 reg, struct rv_jit_context *ctx)
{
	emit_slli(reg, reg, 32, ctx);
	emit_srli(reg, reg, 32, ctx);
}

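/* Tail call: perform the three run-time checks required by the BPF
 * spec (index within bounds, tail-call count below the limit, non-NULL
 * target program), then jump into the target just past its first
 * instruction, which initialises the tail-call counter (TCC).
 */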
static int emit_bpf_tail_call(int insn, struct rv_jit_context *ctx)
{
	int tc_ninsn, off, start_insn = ctx->ninsns;
	u8 tcc = rv_tail_call_reg(ctx);

	/* a0: &ctx
	 * a1: &array
	 * a2: index
	 *
	 * if (index >= array->map.max_entries)
	 *	goto out;
	 */
	tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] :
		   ctx->offset[0];
	emit_zext_32(RV_REG_A2, ctx);

	off = offsetof(struct bpf_array, map.max_entries);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_lwu(RV_REG_T1, off, RV_REG_A1), ctx);
	off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn));
	emit_branch(BPF_JGE, RV_REG_A2, RV_REG_T1, off, ctx);

	/* if (--TCC < 0)
	 *	goto out;
	 */
	emit_addi(RV_REG_TCC, tcc, -1, ctx);
	off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn));
	emit_branch(BPF_JSLT, RV_REG_TCC, RV_REG_ZERO, off, ctx);

	/* prog = array->ptrs[index];
	 * if (!prog)
	 *	goto out;
	 */
	emit_slli(RV_REG_T2, RV_REG_A2, 3, ctx);
	emit_add(RV_REG_T2, RV_REG_T2, RV_REG_A1, ctx);
	off = offsetof(struct bpf_array, ptrs);
	if (is_12b_check(off, insn))
		return -1;
	emit_ld(RV_REG_T2, off, RV_REG_T2, ctx);
	off = ninsns_rvoff(tc_ninsn - (ctx->ninsns - start_insn));
	emit_branch(BPF_JEQ, RV_REG_T2, RV_REG_ZERO, off, ctx);

	/* goto *(prog->bpf_func + 4); */
	off = offsetof(struct bpf_prog, bpf_func);
	if (is_12b_check(off, insn))
		return -1;
	emit_ld(RV_REG_T3, off, RV_REG_T2, ctx);
	__build_epilogue(true, ctx);
	return 0;
}

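/* Resolve the BPF dst/src operands to RISC-V registers up front, so
 * that translating an instruction marks any callee-saved registers it
 * touches as seen. Jumps, calls and exits carry no dst register.
 */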
static void init_regs(u8 *rd, u8 *rs, const struct bpf_insn *insn,
		      struct rv_jit_context *ctx)
{
	u8 code = insn->code;

	switch (code) {
	case BPF_JMP | BPF_JA:
	case BPF_JMP | BPF_CALL:
	case BPF_JMP | BPF_EXIT:
	case BPF_JMP | BPF_TAIL_CALL:
		break;
	default:
		*rd = bpf_to_rv_reg(insn->dst_reg, ctx);
	}

	if (code & (BPF_ALU | BPF_X) || code & (BPF_ALU64 | BPF_X) ||
	    code & (BPF_JMP | BPF_X) || code & (BPF_JMP32 | BPF_X) ||
	    code & BPF_LDX || code & BPF_STX)
		*rs = bpf_to_rv_reg(insn->src_reg, ctx);
}

static void emit_zext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit_mv(RV_REG_T2, *rd, ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit_mv(RV_REG_T1, *rs, ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}

static void emit_sext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit_addiw(RV_REG_T2, *rd, 0, ctx);
	emit_addiw(RV_REG_T1, *rs, 0, ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}

static void emit_zext_32_rd_t1(u8 *rd, struct rv_jit_context *ctx)
{
	emit_mv(RV_REG_T2, *rd, ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
}

static void emit_sext_32_rd(u8 *rd, struct rv_jit_context *ctx)
{
	emit_addiw(RV_REG_T2, *rd, 0, ctx);
	*rd = RV_REG_T2;
}

static int emit_jump_and_link(u8 rd, s64 rvoff, bool force_jalr,
			      struct rv_jit_context *ctx)
{
	s64 upper, lower;

	if (rvoff && is_21b_int(rvoff) && !force_jalr) {
		emit(rv_jal(rd, rvoff >> 1), ctx);
		return 0;
	} else if (in_auipc_jalr_range(rvoff)) {
		upper = (rvoff + (1 << 11)) >> 12;
		lower = rvoff & 0xfff;
		emit(rv_auipc(RV_REG_T1, upper), ctx);
		emit(rv_jalr(rd, RV_REG_T1, lower), ctx);
		return 0;
	}

	pr_err("bpf-jit: target offset 0x%llx is out of range\n", rvoff);
	return -ERANGE;
}

static bool is_signed_bpf_cond(u8 cond)
{
	return cond == BPF_JSGT || cond == BPF_JSLT ||
	       cond == BPF_JSGE || cond == BPF_JSLE;
}

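/* Calls clobber the caller-saved registers, so the result is moved
 * from the RISC-V return register a0 into the register backing
 * BPF_REG_0 (a5) after the jump-and-link.
 */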
static int emit_call(bool fixed, u64 addr, struct rv_jit_context *ctx)
{
	s64 off = 0;
	u64 ip;
	u8 rd;
	int ret;

	if (addr && ctx->insns) {
		ip = (u64)(long)(ctx->insns + ctx->ninsns);
		off = addr - ip;
	}

	ret = emit_jump_and_link(RV_REG_RA, off, !fixed, ctx);
	if (ret)
		return ret;
	rd = bpf_to_rv_reg(BPF_REG_0, ctx);
	emit_mv(rd, RV_REG_A0, ctx);
	return 0;
}

static void emit_atomic(u8 rd, u8 rs, s16 off, s32 imm, bool is64,
			struct rv_jit_context *ctx)
{
	u8 r0;
	int jmp_offset;

	if (off) {
		if (is_12b_int(off)) {
			emit_addi(RV_REG_T1, rd, off, ctx);
		} else {
			emit_imm(RV_REG_T1, off, ctx);
			emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		}
		rd = RV_REG_T1;
	}

	switch (imm) {
	/* lock *(u32/u64 *)(dst_reg + off16) <op>= src_reg */
	case BPF_ADD:
		emit(is64 ? rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;
	case BPF_AND:
		emit(is64 ? rv_amoand_d(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoand_w(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;
	case BPF_OR:
		emit(is64 ? rv_amoor_d(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoor_w(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;
	case BPF_XOR:
		emit(is64 ? rv_amoxor_d(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoxor_w(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;
	/* src_reg = atomic_fetch_<op>(dst_reg + off16, src_reg) */
	case BPF_ADD | BPF_FETCH:
		emit(is64 ? rv_amoadd_d(rs, rs, rd, 0, 0) :
		     rv_amoadd_w(rs, rs, rd, 0, 0), ctx);
		if (!is64)
			emit_zext_32(rs, ctx);
		break;
	case BPF_AND | BPF_FETCH:
		emit(is64 ? rv_amoand_d(rs, rs, rd, 0, 0) :
		     rv_amoand_w(rs, rs, rd, 0, 0), ctx);
		if (!is64)
			emit_zext_32(rs, ctx);
		break;
	case BPF_OR | BPF_FETCH:
		emit(is64 ? rv_amoor_d(rs, rs, rd, 0, 0) :
		     rv_amoor_w(rs, rs, rd, 0, 0), ctx);
		if (!is64)
			emit_zext_32(rs, ctx);
		break;
	case BPF_XOR | BPF_FETCH:
		emit(is64 ? rv_amoxor_d(rs, rs, rd, 0, 0) :
		     rv_amoxor_w(rs, rs, rd, 0, 0), ctx);
		if (!is64)
			emit_zext_32(rs, ctx);
		break;
	/* src_reg = atomic_xchg(dst_reg + off16, src_reg); */
	case BPF_XCHG:
		emit(is64 ? rv_amoswap_d(rs, rs, rd, 0, 0) :
		     rv_amoswap_w(rs, rs, rd, 0, 0), ctx);
		if (!is64)
			emit_zext_32(rs, ctx);
		break;
	/* r0 = atomic_cmpxchg(dst_reg + off16, r0, src_reg); */
	case BPF_CMPXCHG:
		r0 = bpf_to_rv_reg(BPF_REG_0, ctx);
		emit(is64 ? rv_addi(RV_REG_T2, r0, 0) :
		     rv_addiw(RV_REG_T2, r0, 0), ctx);
		emit(is64 ? rv_lr_d(r0, 0, rd, 0, 0) :
		     rv_lr_w(r0, 0, rd, 0, 0), ctx);
		jmp_offset = ninsns_rvoff(8);
		emit(rv_bne(RV_REG_T2, r0, jmp_offset >> 1), ctx);
		emit(is64 ? rv_sc_d(RV_REG_T3, rs, rd, 0, 0) :
		     rv_sc_w(RV_REG_T3, rs, rd, 0, 0), ctx);
		jmp_offset = ninsns_rvoff(-6);
		emit(rv_bne(RV_REG_T3, 0, jmp_offset >> 1), ctx);
		emit(rv_fence(0x3, 0x3), ctx);
		break;
	}
}

#define BPF_FIXUP_OFFSET_MASK	GENMASK(26, 0)
#define BPF_FIXUP_REG_MASK	GENMASK(31, 27)

bool ex_handler_bpf(const struct exception_table_entry *ex,
		    struct pt_regs *regs)
{
	off_t offset = FIELD_GET(BPF_FIXUP_OFFSET_MASK, ex->fixup);
	int regs_offset = FIELD_GET(BPF_FIXUP_REG_MASK, ex->fixup);

	*(unsigned long *)((void *)regs + pt_regmap[regs_offset]) = 0;
	regs->epc = (unsigned long)&ex->fixup - offset;

	return true;
}

/* For accesses to BTF pointers, add an entry to the exception table */
static int add_exception_handler(const struct bpf_insn *insn,
				 struct rv_jit_context *ctx,
				 int dst_reg, int insn_len)
{
	struct exception_table_entry *ex;
	unsigned long pc;
	off_t offset;

	if (!ctx->insns || !ctx->prog->aux->extable || BPF_MODE(insn->code) != BPF_PROBE_MEM)
		return 0;

	if (WARN_ON_ONCE(ctx->nexentries >= ctx->prog->aux->num_exentries))
		return -EINVAL;

	if (WARN_ON_ONCE(insn_len > ctx->ninsns))
		return -EINVAL;

	if (WARN_ON_ONCE(!rvc_enabled() && insn_len == 1))
		return -EINVAL;

	ex = &ctx->prog->aux->extable[ctx->nexentries];
	pc = (unsigned long)&ctx->insns[ctx->ninsns - insn_len];

	offset = pc - (long)&ex->insn;
	if (WARN_ON_ONCE(offset >= 0 || offset < INT_MIN))
		return -ERANGE;
	ex->insn = offset;

	/*
	 * Since the extable follows the program, the fixup offset is always
	 * negative and limited to BPF_JIT_REGION_SIZE. Store a positive value
	 * to keep things simple, and put the destination register in the upper
	 * bits. We don't need to worry about buildtime or runtime sort
	 * modifying the upper bits because the table is already sorted, and
	 * isn't part of the main exception table.
	 */
	offset = (long)&ex->fixup - (pc + insn_len * sizeof(u16));
	if (!FIELD_FIT(BPF_FIXUP_OFFSET_MASK, offset))
		return -ERANGE;

	ex->fixup = FIELD_PREP(BPF_FIXUP_OFFSET_MASK, offset) |
		FIELD_PREP(BPF_FIXUP_REG_MASK, dst_reg);
	ex->type = EX_TYPE_BPF;

	ctx->nexentries++;
	return 0;
}

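/* Emit native code for a single BPF instruction. Returns a negative
 * errno on failure, otherwise the number of extra (already consumed)
 * BPF instruction slots, e.g. 1 for the upper half of a 64-bit
 * immediate load.
 */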
int bpf_jit_emit_insn(const struct bpf_insn *insn, struct rv_jit_context *ctx,
		      bool extra_pass)
{
	bool is64 = BPF_CLASS(insn->code) == BPF_ALU64 ||
		    BPF_CLASS(insn->code) == BPF_JMP;
	int s, e, rvoff, ret, i = insn - ctx->prog->insnsi;
	struct bpf_prog_aux *aux = ctx->prog->aux;
	u8 rd = -1, rs = -1, code = insn->code;
	s16 off = insn->off;
	s32 imm = insn->imm;

	init_regs(&rd, &rs, insn, ctx);

	switch (code) {
	/* dst = src */
	case BPF_ALU | BPF_MOV | BPF_X:
	case BPF_ALU64 | BPF_MOV | BPF_X:
		if (imm == 1) {
			/* Special mov32 for zext */
			emit_zext_32(rd, ctx);
			break;
		}
		emit_mv(rd, rs, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	/* dst = dst OP src */
	case BPF_ALU | BPF_ADD | BPF_X:
	case BPF_ALU64 | BPF_ADD | BPF_X:
		emit_add(rd, rd, rs, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_X:
	case BPF_ALU64 | BPF_SUB | BPF_X:
		if (is64)
			emit_sub(rd, rd, rs, ctx);
		else
			emit_subw(rd, rd, rs, ctx);

		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_X:
	case BPF_ALU64 | BPF_AND | BPF_X:
		emit_and(rd, rd, rs, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_X:
	case BPF_ALU64 | BPF_OR | BPF_X:
		emit_or(rd, rd, rs, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_X:
	case BPF_ALU64 | BPF_XOR | BPF_X:
		emit_xor(rd, rd, rs, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_X:
	case BPF_ALU64 | BPF_MUL | BPF_X:
		emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_X:
	case BPF_ALU64 | BPF_DIV | BPF_X:
		emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_X:
	case BPF_ALU64 | BPF_MOD | BPF_X:
		emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_X:
	case BPF_ALU64 | BPF_LSH | BPF_X:
		emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_X:
	case BPF_ALU64 | BPF_RSH | BPF_X:
		emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_X:
	case BPF_ALU64 | BPF_ARSH | BPF_X:
		emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	/* dst = -dst */
	case BPF_ALU | BPF_NEG:
	case BPF_ALU64 | BPF_NEG:
		emit_sub(rd, RV_REG_ZERO, rd, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	/* dst = BSWAP##imm(dst) */
	case BPF_ALU | BPF_END | BPF_FROM_LE:
		switch (imm) {
		case 16:
			emit_slli(rd, rd, 48, ctx);
			emit_srli(rd, rd, 48, ctx);
			break;
		case 32:
			if (!aux->verifier_zext)
				emit_zext_32(rd, ctx);
			break;
		case 64:
			/* Do nothing */
			break;
		}
		break;
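	/* Base RV64G has no byte-swap instruction (rev8 is part of the
	 * Zbb extension), so big-endian conversion accumulates the
	 * result one byte at a time via t1/t2.
	 */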
	case BPF_ALU | BPF_END | BPF_FROM_BE:
		emit_li(RV_REG_T2, 0, ctx);

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);
		if (imm == 16)
			goto out_be;

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);
		if (imm == 32)
			goto out_be;

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);

		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);
		emit_slli(RV_REG_T2, RV_REG_T2, 8, ctx);
		emit_srli(rd, rd, 8, ctx);

out_be:
		emit_andi(RV_REG_T1, rd, 0xff, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, RV_REG_T1, ctx);

		emit_mv(rd, RV_REG_T2, ctx);
		break;
	/* dst = imm */
	case BPF_ALU | BPF_MOV | BPF_K:
	case BPF_ALU64 | BPF_MOV | BPF_K:
		emit_imm(rd, imm, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	/* dst = dst OP imm */
	case BPF_ALU | BPF_ADD | BPF_K:
	case BPF_ALU64 | BPF_ADD | BPF_K:
		if (is_12b_int(imm)) {
			emit_addi(rd, rd, imm, ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_add(rd, rd, RV_REG_T1, ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_K:
	case BPF_ALU64 | BPF_SUB | BPF_K:
		if (is_12b_int(-imm)) {
			emit_addi(rd, rd, -imm, ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_sub(rd, rd, RV_REG_T1, ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_K:
	case BPF_ALU64 | BPF_AND | BPF_K:
		if (is_12b_int(imm)) {
			emit_andi(rd, rd, imm, ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_and(rd, rd, RV_REG_T1, ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_K:
	case BPF_ALU64 | BPF_OR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_ori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_or(rd, rd, RV_REG_T1, ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_K:
	case BPF_ALU64 | BPF_XOR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_xori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_xor(rd, rd, RV_REG_T1, ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_K:
	case BPF_ALU64 | BPF_MUL | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_mul(rd, rd, RV_REG_T1) :
		     rv_mulw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_K:
	case BPF_ALU64 | BPF_DIV | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_divu(rd, rd, RV_REG_T1) :
		     rv_divuw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_K:
	case BPF_ALU64 | BPF_MOD | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_remu(rd, rd, RV_REG_T1) :
		     rv_remuw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_K:
	case BPF_ALU64 | BPF_LSH | BPF_K:
		emit_slli(rd, rd, imm, ctx);

		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_K:
	case BPF_ALU64 | BPF_RSH | BPF_K:
		if (is64)
			emit_srli(rd, rd, imm, ctx);
		else
			emit(rv_srliw(rd, rd, imm), ctx);

		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_K:
	case BPF_ALU64 | BPF_ARSH | BPF_K:
		if (is64)
			emit_srai(rd, rd, imm, ctx);
		else
			emit(rv_sraiw(rd, rd, imm), ctx);

		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	/* JUMP off */
	case BPF_JMP | BPF_JA:
		rvoff = rv_offset(i, off, ctx);
		ret = emit_jump_and_link(RV_REG_ZERO, rvoff, false, ctx);
		if (ret)
			return ret;
		break;
	/* IF (dst COND src) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_X:
	case BPF_JMP32 | BPF_JEQ | BPF_X:
	case BPF_JMP | BPF_JGT | BPF_X:
	case BPF_JMP32 | BPF_JGT | BPF_X:
	case BPF_JMP | BPF_JLT | BPF_X:
	case BPF_JMP32 | BPF_JLT | BPF_X:
	case BPF_JMP | BPF_JGE | BPF_X:
	case BPF_JMP32 | BPF_JGE | BPF_X:
	case BPF_JMP | BPF_JLE | BPF_X:
	case BPF_JMP32 | BPF_JLE | BPF_X:
	case BPF_JMP | BPF_JNE | BPF_X:
	case BPF_JMP32 | BPF_JNE | BPF_X:
	case BPF_JMP | BPF_JSGT | BPF_X:
	case BPF_JMP32 | BPF_JSGT | BPF_X:
	case BPF_JMP | BPF_JSLT | BPF_X:
	case BPF_JMP32 | BPF_JSLT | BPF_X:
	case BPF_JMP | BPF_JSGE | BPF_X:
	case BPF_JMP32 | BPF_JSGE | BPF_X:
	case BPF_JMP | BPF_JSLE | BPF_X:
	case BPF_JMP32 | BPF_JSLE | BPF_X:
	case BPF_JMP | BPF_JSET | BPF_X:
	case BPF_JMP32 | BPF_JSET | BPF_X:
		rvoff = rv_offset(i, off, ctx);
		if (!is64) {
			s = ctx->ninsns;
			if (is_signed_bpf_cond(BPF_OP(code)))
				emit_sext_32_rd_rs(&rd, &rs, ctx);
			else
				emit_zext_32_rd_rs(&rd, &rs, ctx);
			e = ctx->ninsns;

			/* Adjust for extra insns */
			rvoff -= ninsns_rvoff(e - s);
		}

		if (BPF_OP(code) == BPF_JSET) {
			/* Adjust for and */
			rvoff -= 4;
			emit_and(RV_REG_T1, rd, rs, ctx);
			emit_branch(BPF_JNE, RV_REG_T1, RV_REG_ZERO, rvoff,
				    ctx);
		} else {
			emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);
		}
		break;
	/* IF (dst COND imm) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_K:
	case BPF_JMP32 | BPF_JEQ | BPF_K:
	case BPF_JMP | BPF_JGT | BPF_K:
	case BPF_JMP32 | BPF_JGT | BPF_K:
	case BPF_JMP | BPF_JLT | BPF_K:
	case BPF_JMP32 | BPF_JLT | BPF_K:
	case BPF_JMP | BPF_JGE | BPF_K:
	case BPF_JMP32 | BPF_JGE | BPF_K:
	case BPF_JMP | BPF_JLE | BPF_K:
	case BPF_JMP32 | BPF_JLE | BPF_K:
	case BPF_JMP | BPF_JNE | BPF_K:
	case BPF_JMP32 | BPF_JNE | BPF_K:
	case BPF_JMP | BPF_JSGT | BPF_K:
	case BPF_JMP32 | BPF_JSGT | BPF_K:
	case BPF_JMP | BPF_JSLT | BPF_K:
	case BPF_JMP32 | BPF_JSLT | BPF_K:
	case BPF_JMP | BPF_JSGE | BPF_K:
	case BPF_JMP32 | BPF_JSGE | BPF_K:
	case BPF_JMP | BPF_JSLE | BPF_K:
	case BPF_JMP32 | BPF_JSLE | BPF_K:
		rvoff = rv_offset(i, off, ctx);
		s = ctx->ninsns;
		if (imm) {
			emit_imm(RV_REG_T1, imm, ctx);
			rs = RV_REG_T1;
		} else {
			/* If imm is 0, simply use zero register. */
			rs = RV_REG_ZERO;
		}
		if (!is64) {
			if (is_signed_bpf_cond(BPF_OP(code)))
				emit_sext_32_rd(&rd, ctx);
			else
				emit_zext_32_rd_t1(&rd, ctx);
		}
		e = ctx->ninsns;

		/* Adjust for extra insns */
		rvoff -= ninsns_rvoff(e - s);
		emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);
		break;
	case BPF_JMP | BPF_JSET | BPF_K:
	case BPF_JMP32 | BPF_JSET | BPF_K:
		rvoff = rv_offset(i, off, ctx);
		s = ctx->ninsns;
		if (is_12b_int(imm)) {
			emit_andi(RV_REG_T1, rd, imm, ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit_and(RV_REG_T1, rd, RV_REG_T1, ctx);
		}
		/* For jset32, we should clear the upper 32 bits of t1, but
		 * sign-extension is sufficient here and saves one instruction,
		 * as t1 is used only in comparison against zero.
		 */
		if (!is64 && imm < 0)
			emit_addiw(RV_REG_T1, RV_REG_T1, 0, ctx);
		e = ctx->ninsns;
		rvoff -= ninsns_rvoff(e - s);
		emit_branch(BPF_JNE, RV_REG_T1, RV_REG_ZERO, rvoff, ctx);
		break;
	/* function call */
	case BPF_JMP | BPF_CALL:
	{
		bool fixed;
		u64 addr;

		mark_call(ctx);
		ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr,
					    &fixed);
		if (ret < 0)
			return ret;
		ret = emit_call(fixed, addr, ctx);
		if (ret)
			return ret;
		break;
	}
	/* tail call */
	case BPF_JMP | BPF_TAIL_CALL:
		if (emit_bpf_tail_call(i, ctx))
			return -1;
		break;
	/* function return */
	case BPF_JMP | BPF_EXIT:
		if (i == ctx->prog->len - 1)
			break;

		rvoff = epilogue_offset(ctx);
		ret = emit_jump_and_link(RV_REG_ZERO, rvoff, false, ctx);
		if (ret)
			return ret;
		break;
	/* dst = imm64 */
	case BPF_LD | BPF_IMM | BPF_DW:
	{
		struct bpf_insn insn1 = insn[1];
		u64 imm64;

		imm64 = (u64)insn1.imm << 32 | (u32)imm;
		emit_imm(rd, imm64, ctx);
		return 1;
	}
	/* LDX: dst = *(size *)(src + off) */
	case BPF_LDX | BPF_MEM | BPF_B:
	case BPF_LDX | BPF_MEM | BPF_H:
	case BPF_LDX | BPF_MEM | BPF_W:
	case BPF_LDX | BPF_MEM | BPF_DW:
	case BPF_LDX | BPF_PROBE_MEM | BPF_B:
	case BPF_LDX | BPF_PROBE_MEM | BPF_H:
	case BPF_LDX | BPF_PROBE_MEM | BPF_W:
	case BPF_LDX | BPF_PROBE_MEM | BPF_DW:
	{
		int insn_len, insns_start;

		switch (BPF_SIZE(code)) {
		case BPF_B:
			if (is_12b_int(off)) {
				insns_start = ctx->ninsns;
				emit(rv_lbu(rd, off, rs), ctx);
				insn_len = ctx->ninsns - insns_start;
				break;
			}

			emit_imm(RV_REG_T1, off, ctx);
			emit_add(RV_REG_T1, RV_REG_T1, rs, ctx);
			insns_start = ctx->ninsns;
			emit(rv_lbu(rd, 0, RV_REG_T1), ctx);
			insn_len = ctx->ninsns - insns_start;
			if (insn_is_zext(&insn[1]))
				return 1;
			break;
		case BPF_H:
			if (is_12b_int(off)) {
				insns_start = ctx->ninsns;
				emit(rv_lhu(rd, off, rs), ctx);
				insn_len = ctx->ninsns - insns_start;
				break;
			}

			emit_imm(RV_REG_T1, off, ctx);
			emit_add(RV_REG_T1, RV_REG_T1, rs, ctx);
			insns_start = ctx->ninsns;
			emit(rv_lhu(rd, 0, RV_REG_T1), ctx);
			insn_len = ctx->ninsns - insns_start;
			if (insn_is_zext(&insn[1]))
				return 1;
			break;
		case BPF_W:
			if (is_12b_int(off)) {
				insns_start = ctx->ninsns;
				emit(rv_lwu(rd, off, rs), ctx);
				insn_len = ctx->ninsns - insns_start;
				break;
			}

			emit_imm(RV_REG_T1, off, ctx);
			emit_add(RV_REG_T1, RV_REG_T1, rs, ctx);
			insns_start = ctx->ninsns;
			emit(rv_lwu(rd, 0, RV_REG_T1), ctx);
			insn_len = ctx->ninsns - insns_start;
			if (insn_is_zext(&insn[1]))
				return 1;
			break;
		case BPF_DW:
			if (is_12b_int(off)) {
				insns_start = ctx->ninsns;
				emit_ld(rd, off, rs, ctx);
				insn_len = ctx->ninsns - insns_start;
				break;
			}

			emit_imm(RV_REG_T1, off, ctx);
			emit_add(RV_REG_T1, RV_REG_T1, rs, ctx);
			insns_start = ctx->ninsns;
			emit_ld(rd, 0, RV_REG_T1, ctx);
			insn_len = ctx->ninsns - insns_start;
			break;
		}

		ret = add_exception_handler(insn, ctx, rd, insn_len);
		if (ret)
			return ret;
		break;
	}
	/* speculation barrier */
	case BPF_ST | BPF_NOSPEC:
		break;
	/* ST: *(size *)(dst + off) = imm */
	case BPF_ST | BPF_MEM | BPF_B:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, rd, ctx);
		emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_H:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, rd, ctx);
		emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_W:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit_sw(rd, off, RV_REG_T1, ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, rd, ctx);
		emit_sw(RV_REG_T2, 0, RV_REG_T1, ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_DW:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit_sd(rd, off, RV_REG_T1, ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit_add(RV_REG_T2, RV_REG_T2, rd, ctx);
		emit_sd(RV_REG_T2, 0, RV_REG_T1, ctx);
		break;
	/* STX: *(size *)(dst + off) = src */
	case BPF_STX | BPF_MEM | BPF_B:
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		emit(rv_sb(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_H:
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		emit(rv_sh(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_W:
		if (is_12b_int(off)) {
			emit_sw(rd, off, rs, ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		emit_sw(RV_REG_T1, 0, rs, ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_DW:
		if (is_12b_int(off)) {
			emit_sd(rd, off, rs, ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit_add(RV_REG_T1, RV_REG_T1, rd, ctx);
		emit_sd(RV_REG_T1, 0, rs, ctx);
		break;
	case BPF_STX | BPF_ATOMIC | BPF_W:
	case BPF_STX | BPF_ATOMIC | BPF_DW:
		emit_atomic(rd, rs, off, imm,
			    BPF_SIZE(code) == BPF_DW, ctx);
		break;
	default:
		pr_err("bpf-jit: unknown opcode %02x\n", code);
		return -EINVAL;
	}

	return 0;
}

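/* Stack frame built by the prologue, from high to low address:
 *
 *	[ ra        ]	(only if seen)
 *	[ fp        ]
 *	[ s1 .. s6  ]	(each only if seen)
 *	[ BPF stack ]	bpf_stack_adjust bytes; its top is the BPF
 *			frame pointer (s5)
 *
 * The total is kept 16-byte aligned per the RISC-V psABI.
 */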
void bpf_jit_build_prologue(struct rv_jit_context *ctx)
{
	int stack_adjust = 0, store_offset, bpf_stack_adjust;

	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16);
	if (bpf_stack_adjust)
		mark_fp(ctx);

	if (seen_reg(RV_REG_RA, ctx))
		stack_adjust += 8;
	stack_adjust += 8; /* RV_REG_FP */
	if (seen_reg(RV_REG_S1, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S2, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S3, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S4, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S5, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S6, ctx))
		stack_adjust += 8;

	stack_adjust = round_up(stack_adjust, 16);
	stack_adjust += bpf_stack_adjust;

	store_offset = stack_adjust - 8;

	/* First instruction is always setting the tail-call-counter
	 * (TCC) register. This instruction is skipped for tail calls.
	 * Force using a 4-byte (non-compressed) instruction.
	 */
	emit(rv_addi(RV_REG_TCC, RV_REG_ZERO, MAX_TAIL_CALL_CNT), ctx);

	emit_addi(RV_REG_SP, RV_REG_SP, -stack_adjust, ctx);

	if (seen_reg(RV_REG_RA, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_RA, ctx);
		store_offset -= 8;
	}
	emit_sd(RV_REG_SP, store_offset, RV_REG_FP, ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S1, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S2, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S3, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S4, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S5, ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit_sd(RV_REG_SP, store_offset, RV_REG_S6, ctx);
		store_offset -= 8;
	}

	emit_addi(RV_REG_FP, RV_REG_SP, stack_adjust, ctx);

	if (bpf_stack_adjust)
		emit_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust, ctx);

	/* Program contains calls and tail calls, so RV_REG_TCC needs
	 * to be saved across calls.
	 */
	if (seen_tail_call(ctx) && seen_call(ctx))
		emit_mv(RV_REG_TCC_SAVED, RV_REG_TCC, ctx);

	ctx->stack_size = stack_adjust;
}

void bpf_jit_build_epilogue(struct rv_jit_context *ctx)
{
	__build_epilogue(false, ctx);
}