Lines matching refs: src_reg in kernel/bpf/verifier.c (the BPF verifier)
256 insn->src_reg == 0; in bpf_helper_call()
262 insn->src_reg == BPF_PSEUDO_CALL; in bpf_pseudo_call()
268 insn->src_reg == BPF_PSEUDO_KFUNC_CALL; in bpf_pseudo_kfunc_call()
544 return insn->code == (BPF_JMP | BPF_JCOND) && insn->src_reg == BPF_MAY_GOTO; in is_may_goto_insn()
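The cluster above shows src_reg acting as a call-type discriminator for BPF_CALL instructions. A minimal userspace sketch of the same classification, assuming only the uapi constants and struct bpf_insn from <linux/bpf.h> (the helpers above live in kernel/bpf/verifier.c):

#include <linux/bpf.h>
#include <stdbool.h>

static bool is_call(const struct bpf_insn *insn)
{
        return insn->code == (BPF_JMP | BPF_CALL);
}

static bool is_helper_call(const struct bpf_insn *insn)
{
        return is_call(insn) && insn->src_reg == 0;                      /* imm = helper id */
}

static bool is_subprog_call(const struct bpf_insn *insn)
{
        return is_call(insn) && insn->src_reg == BPF_PSEUDO_CALL;        /* imm = insn offset */
}

static bool is_kfunc_call(const struct bpf_insn *insn)
{
        return is_call(insn) && insn->src_reg == BPF_PSEUDO_KFUNC_CALL;  /* imm = BTF id */
}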
3410 insn[i].src_reg == 0 && in check_subprogs()
3577 if (insn->src_reg == BPF_PSEUDO_CALL) in is_reg64()
3658 return insn->src_reg; in insn_def_regno()
3941 if (insn->src_reg != BPF_PSEUDO_KFUNC_CALL) in disasm_kfunc_name()
4168 u32 sreg = insn->src_reg; in backtrack_insn()
4353 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && insn->imm == 0) in backtrack_insn()
4904 struct bpf_reg_state *src_reg) in assign_scalar_id_before_mov() argument
4906 if (src_reg->type != SCALAR_VALUE) in assign_scalar_id_before_mov()
4909 if (src_reg->id & BPF_ADD_CONST) { in assign_scalar_id_before_mov()
4915 src_reg->id = 0; in assign_scalar_id_before_mov()
4916 src_reg->off = 0; in assign_scalar_id_before_mov()
4919 if (!src_reg->id && !tnum_is_const(src_reg->var_off)) in assign_scalar_id_before_mov()
4924 src_reg->id = ++env->id_gen; in assign_scalar_id_before_mov()
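The 4904-4924 cluster is assign_scalar_id_before_mov(): before a scalar-to-scalar mov, a non-constant source without an id is given a fresh one so the two copies stay linked and later bounds learned on one can be propagated to the other. A simplified, self-contained sketch with types reduced to what the logic needs; the real function additionally strips ids carrying the BPF_ADD_CONST flag, as the 4909-4916 hits show:

#include <stdint.h>
#include <stdbool.h>

enum reg_type { SCALAR_VALUE };                 /* only the case this sketch needs */
struct tnum { uint64_t value, mask; };
struct reg { enum reg_type type; uint32_t id; struct tnum var_off; };
struct env { uint32_t id_gen; };

static bool tnum_is_const(struct tnum t) { return t.mask == 0; }

static void assign_scalar_id_before_mov(struct env *env, struct reg *src)
{
        if (src->type != SCALAR_VALUE)
                return;
        /* a known constant needs no id; an already-linked reg keeps its id */
        if (!src->id && !tnum_is_const(src->var_off))
                src->id = ++env->id_gen;
}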
7671 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_load_mem()
7680 src_reg_type = regs[insn->src_reg].type; in check_load_mem()
7685 err = check_mem_access(env, env->insn_idx, insn->src_reg, insn->off, in check_load_mem()
7703 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_store_reg()
7716 BPF_SIZE(insn->code), BPF_WRITE, insn->src_reg, in check_store_reg()
7735 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_atomic_rmw()
7758 if (is_pointer_value(env, insn->src_reg)) { in check_atomic_rmw()
7759 verbose(env, "R%d leaks addr into mem\n", insn->src_reg); in check_atomic_rmw()
7774 load_reg = insn->src_reg; in check_atomic_rmw()
7821 if (!atomic_ptr_type_ok(env, insn->src_reg, insn)) { in check_atomic_load()
7823 insn->src_reg, in check_atomic_load()
7824 reg_type_str(env, reg_state(env, insn->src_reg)->type)); in check_atomic_load()
14056 mark_reg_unknown(env, regs, insn->src_reg); in sanitize_speculative_path()
14178 u32 dst = insn->dst_reg, src = insn->src_reg; in sanitize_err()
14510 struct bpf_reg_state *src_reg) in scalar32_min_max_add() argument
14517 if (check_add_overflow(*dst_smin, src_reg->s32_min_value, dst_smin) || in scalar32_min_max_add()
14518 check_add_overflow(*dst_smax, src_reg->s32_max_value, dst_smax)) { in scalar32_min_max_add()
14522 if (check_add_overflow(*dst_umin, src_reg->u32_min_value, dst_umin) || in scalar32_min_max_add()
14523 check_add_overflow(*dst_umax, src_reg->u32_max_value, dst_umax)) { in scalar32_min_max_add()
14530 struct bpf_reg_state *src_reg) in scalar_min_max_add() argument
14537 if (check_add_overflow(*dst_smin, src_reg->smin_value, dst_smin) || in scalar_min_max_add()
14538 check_add_overflow(*dst_smax, src_reg->smax_value, dst_smax)) { in scalar_min_max_add()
14542 if (check_add_overflow(*dst_umin, src_reg->umin_value, dst_umin) || in scalar_min_max_add()
14543 check_add_overflow(*dst_umax, src_reg->umax_value, dst_umax)) { in scalar_min_max_add()
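The scalar*_min_max_add() hits show the overflow-guarded interval addition: if either signed bound addition overflows, the whole signed range collapses to unknown, and likewise for the unsigned range. A userspace analogue, with GCC/Clang __builtin_add_overflow standing in for the kernel's check_add_overflow() and field names following struct bpf_reg_state:

#include <stdint.h>

struct range64 {
        int64_t  smin_value, smax_value;        /* signed bounds */
        uint64_t umin_value, umax_value;        /* unsigned bounds */
};

static void bounds_add(struct range64 *dst, const struct range64 *src)
{
        if (__builtin_add_overflow(dst->smin_value, src->smin_value, &dst->smin_value) ||
            __builtin_add_overflow(dst->smax_value, src->smax_value, &dst->smax_value)) {
                dst->smin_value = INT64_MIN;    /* signed range now unknown */
                dst->smax_value = INT64_MAX;
        }
        if (__builtin_add_overflow(dst->umin_value, src->umin_value, &dst->umin_value) ||
            __builtin_add_overflow(dst->umax_value, src->umax_value, &dst->umax_value)) {
                dst->umin_value = 0;            /* unsigned range now unknown */
                dst->umax_value = UINT64_MAX;
        }
}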
14550 struct bpf_reg_state *src_reg) in scalar32_min_max_sub() argument
14554 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_sub()
14555 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_sub()
14557 if (check_sub_overflow(*dst_smin, src_reg->s32_max_value, dst_smin) || in scalar32_min_max_sub()
14558 check_sub_overflow(*dst_smax, src_reg->s32_min_value, dst_smax)) { in scalar32_min_max_sub()
14575 struct bpf_reg_state *src_reg) in scalar_min_max_sub() argument
14579 u64 umin_val = src_reg->umin_value; in scalar_min_max_sub()
14580 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
14582 if (check_sub_overflow(*dst_smin, src_reg->smax_value, dst_smin) || in scalar_min_max_sub()
14583 check_sub_overflow(*dst_smax, src_reg->smin_value, dst_smax)) { in scalar_min_max_sub()
14600 struct bpf_reg_state *src_reg) in scalar32_min_max_mul() argument
14608 if (check_mul_overflow(*dst_umax, src_reg->u32_max_value, dst_umax) || in scalar32_min_max_mul()
14609 check_mul_overflow(*dst_umin, src_reg->u32_min_value, dst_umin)) { in scalar32_min_max_mul()
14614 if (check_mul_overflow(*dst_smin, src_reg->s32_min_value, &tmp_prod[0]) || in scalar32_min_max_mul()
14615 check_mul_overflow(*dst_smin, src_reg->s32_max_value, &tmp_prod[1]) || in scalar32_min_max_mul()
14616 check_mul_overflow(*dst_smax, src_reg->s32_min_value, &tmp_prod[2]) || in scalar32_min_max_mul()
14617 check_mul_overflow(*dst_smax, src_reg->s32_max_value, &tmp_prod[3])) { in scalar32_min_max_mul()
14628 struct bpf_reg_state *src_reg) in scalar_min_max_mul() argument
14636 if (check_mul_overflow(*dst_umax, src_reg->umax_value, dst_umax) || in scalar_min_max_mul()
14637 check_mul_overflow(*dst_umin, src_reg->umin_value, dst_umin)) { in scalar_min_max_mul()
14642 if (check_mul_overflow(*dst_smin, src_reg->smin_value, &tmp_prod[0]) || in scalar_min_max_mul()
14643 check_mul_overflow(*dst_smin, src_reg->smax_value, &tmp_prod[1]) || in scalar_min_max_mul()
14644 check_mul_overflow(*dst_smax, src_reg->smin_value, &tmp_prod[2]) || in scalar_min_max_mul()
14645 check_mul_overflow(*dst_smax, src_reg->smax_value, &tmp_prod[3])) { in scalar_min_max_mul()
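For multiplication the signed bounds need all four cross products (the tmp_prod[0..3] writes above): the new bounds are their minimum and maximum, and any overflow again forces the range to unknown. A sketch of the signed half under the same assumptions as the previous snippet:

#include <stdint.h>
#include <stdbool.h>

/* Returns false and collapses the range if any product overflows. */
static bool mul_signed_bounds(int64_t *smin, int64_t *smax,
                              int64_t src_smin, int64_t src_smax)
{
        int64_t p[4];
        int i;

        if (__builtin_mul_overflow(*smin, src_smin, &p[0]) ||
            __builtin_mul_overflow(*smin, src_smax, &p[1]) ||
            __builtin_mul_overflow(*smax, src_smin, &p[2]) ||
            __builtin_mul_overflow(*smax, src_smax, &p[3])) {
                *smin = INT64_MIN;
                *smax = INT64_MAX;
                return false;
        }
        *smin = *smax = p[0];
        for (i = 1; i < 4; i++) {
                if (p[i] < *smin)
                        *smin = p[i];
                if (p[i] > *smax)
                        *smax = p[i];
        }
        return true;
}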
14656 struct bpf_reg_state *src_reg) in scalar32_min_max_and() argument
14658 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_and()
14661 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_and()
14687 struct bpf_reg_state *src_reg) in scalar_min_max_and() argument
14689 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_and()
14691 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
14719 struct bpf_reg_state *src_reg) in scalar32_min_max_or() argument
14721 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_or()
14724 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_or()
14750 struct bpf_reg_state *src_reg) in scalar_min_max_or() argument
14752 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_or()
14754 u64 umin_val = src_reg->umin_value; in scalar_min_max_or()
14782 struct bpf_reg_state *src_reg) in scalar32_min_max_xor() argument
14784 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_xor()
14810 struct bpf_reg_state *src_reg) in scalar_min_max_xor() argument
14812 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_xor()
14858 struct bpf_reg_state *src_reg) in scalar32_min_max_lsh() argument
14860 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_lsh()
14861 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_lsh()
14906 struct bpf_reg_state *src_reg) in scalar_min_max_lsh() argument
14908 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
14909 u64 umin_val = src_reg->umin_value; in scalar_min_max_lsh()
14921 struct bpf_reg_state *src_reg) in scalar32_min_max_rsh() argument
14924 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_rsh()
14925 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_rsh()
14953 struct bpf_reg_state *src_reg) in scalar_min_max_rsh() argument
14955 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
14956 u64 umin_val = src_reg->umin_value; in scalar_min_max_rsh()
14987 struct bpf_reg_state *src_reg) in scalar32_min_max_arsh() argument
14989 u64 umin_val = src_reg->u32_min_value; in scalar32_min_max_arsh()
15010 struct bpf_reg_state *src_reg) in scalar_min_max_arsh() argument
15012 u64 umin_val = src_reg->umin_value; in scalar_min_max_arsh()
15037 const struct bpf_reg_state *src_reg) in is_safe_to_compute_dst_reg_range() argument
15043 if (tnum_subreg_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
15044 && src_reg->s32_min_value == src_reg->s32_max_value in is_safe_to_compute_dst_reg_range()
15045 && src_reg->u32_min_value == src_reg->u32_max_value) in is_safe_to_compute_dst_reg_range()
15048 if (tnum_is_const(src_reg->var_off) in is_safe_to_compute_dst_reg_range()
15049 && src_reg->smin_value == src_reg->smax_value in is_safe_to_compute_dst_reg_range()
15050 && src_reg->umin_value == src_reg->umax_value) in is_safe_to_compute_dst_reg_range()
15070 return (src_is_const && src_reg->umax_value < insn_bitness); in is_safe_to_compute_dst_reg_range()
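is_safe_to_compute_dst_reg_range() (15037-15070) gates the shift cases: a result range is only derived when the shift amount is a known constant below the operand width, since shifting by >= the width is undefined behaviour. The final return above reduces to this predicate:

#include <stdint.h>
#include <stdbool.h>

static bool shift_range_safe(bool src_is_const, uint64_t src_umax,
                             uint32_t insn_bitness /* 32 or 64 */)
{
        /* mirrors the return at 15070: the shift amount must be a known
         * constant and must stay below the operand width */
        return src_is_const && src_umax < insn_bitness;
}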
15083 struct bpf_reg_state src_reg) in adjust_scalar_min_max_vals() argument
15089 if (!is_safe_to_compute_dst_reg_range(insn, &src_reg)) { in adjust_scalar_min_max_vals()
15116 scalar32_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15117 scalar_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15118 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15121 scalar32_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15122 scalar_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15123 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15126 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15127 scalar32_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15128 scalar_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15131 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15132 scalar32_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15133 scalar_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15136 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15137 scalar32_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15138 scalar_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15141 dst_reg->var_off = tnum_xor(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
15142 scalar32_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15143 scalar_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15147 scalar32_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15149 scalar_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15153 scalar32_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15155 scalar_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15159 scalar32_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
15161 scalar_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
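Each arithmetic case in adjust_scalar_min_max_vals() updates var_off alongside the 32- and 64-bit min/max bounds. var_off is a tnum ("tristate number"): value holds the bits known to be set, mask the bits whose state is unknown. The addition rule, a sketch mirroring tnum_add() in kernel/bpf/tnum.c as of recent kernels:

#include <stdint.h>

struct tnum { uint64_t value; uint64_t mask; };

static struct tnum tnum_add(struct tnum a, struct tnum b)
{
        uint64_t sm = a.mask + b.mask;          /* sum of unknown bits */
        uint64_t sv = a.value + b.value;        /* sum of known bits */
        uint64_t sigma = sm + sv;
        uint64_t chi = sigma ^ sv;              /* bits where carries may differ */
        uint64_t mu = chi | a.mask | b.mask;    /* all uncertain result bits */

        return (struct tnum){ .value = sv & ~mu, .mask = mu };
}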
15182 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
15189 src_reg = NULL; in adjust_reg_min_max_vals()
15209 src_reg = &regs[insn->src_reg]; in adjust_reg_min_max_vals()
15210 if (src_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
15233 src_reg, dst_reg); in adjust_reg_min_max_vals()
15237 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
15241 dst_reg, src_reg); in adjust_reg_min_max_vals()
15244 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
15254 src_reg = &off_reg; in adjust_reg_min_max_vals()
15257 ptr_reg, src_reg); in adjust_reg_min_max_vals()
15266 if (WARN_ON(!src_reg)) { in adjust_reg_min_max_vals()
15271 err = adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
15285 dst_reg->id && is_reg_const(src_reg, false)) { in adjust_reg_min_max_vals()
15286 u64 val = reg_const_value(src_reg, false); in adjust_reg_min_max_vals()
15321 insn->src_reg != BPF_REG_0 || in check_alu_op()
15327 if (insn->src_reg != BPF_REG_0 || insn->off != 0 || in check_alu_op()
15379 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
15383 if (insn->src_reg != BPF_REG_0 || insn->off != 0) { in check_alu_op()
15395 struct bpf_reg_state *src_reg = regs + insn->src_reg; in check_alu_op() local
15411 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15412 copy_register_state(dst_reg, src_reg); in check_alu_op()
15417 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
15420 insn->src_reg); in check_alu_op()
15422 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
15425 no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
15427 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15428 copy_register_state(dst_reg, src_reg); in check_alu_op()
15440 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
15443 insn->src_reg); in check_alu_op()
15445 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
15447 bool is_src_reg_u32 = get_reg_width(src_reg) <= 32; in check_alu_op()
15450 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15451 copy_register_state(dst_reg, src_reg); in check_alu_op()
15462 bool no_sext = src_reg->umax_value < (1ULL << (insn->off - 1)); in check_alu_op()
15465 assign_scalar_id_before_mov(env, src_reg); in check_alu_op()
15466 copy_register_state(dst_reg, src_reg); in check_alu_op()
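The BPF_MOVSX hits in check_alu_op() (insn->off selects an 8-, 16- or 32-bit source width) keep the source's scalar state across the move only when sign extension is provably a no-op; the no_sext tests at 15425 and 15462 reduce to this check:

#include <stdint.h>
#include <stdbool.h>

static bool movsx_is_noop(uint64_t src_umax, uint32_t off /* 8, 16 or 32 */)
{
        /* sign extension changes nothing iff the value fits in off-1
         * value bits, i.e. the would-be sign bit is provably clear */
        return src_umax < (1ULL << (off - 1));
}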
15509 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
15513 if (insn->src_reg != BPF_REG_0 || insn->off > 1 || in check_alu_op()
15783 struct bpf_reg_state *src_reg, in is_pkt_ptr_branch_taken() argument
15788 if (src_reg->type == PTR_TO_PACKET_END) { in is_pkt_ptr_branch_taken()
15791 pkt = src_reg; in is_pkt_ptr_branch_taken()
16174 struct bpf_reg_state *src_reg, in try_match_pkt_pointers() argument
16188 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16190 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16196 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16198 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16200 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
16201 src_reg->type, true); in try_match_pkt_pointers()
16202 mark_pkt_end(this_branch, insn->src_reg, false); in try_match_pkt_pointers()
16209 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16211 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16217 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16219 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16221 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
16222 src_reg->type, false); in try_match_pkt_pointers()
16223 mark_pkt_end(other_branch, insn->src_reg, true); in try_match_pkt_pointers()
16230 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16232 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16238 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16240 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16242 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
16243 src_reg->type, false); in try_match_pkt_pointers()
16244 mark_pkt_end(this_branch, insn->src_reg, true); in try_match_pkt_pointers()
16251 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
16253 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
16259 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
16261 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
16263 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
16264 src_reg->type, true); in try_match_pkt_pointers()
16265 mark_pkt_end(other_branch, insn->src_reg, false); in try_match_pkt_pointers()
16376 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
16395 insn->src_reg != BPF_MAY_GOTO || in check_cond_jmp_op()
16427 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_cond_jmp_op()
16431 src_reg = ®s[insn->src_reg]; in check_cond_jmp_op()
16432 if (!(reg_is_pkt_pointer_any(dst_reg) && reg_is_pkt_pointer_any(src_reg)) && in check_cond_jmp_op()
16433 is_pointer_value(env, insn->src_reg)) { in check_cond_jmp_op()
16435 insn->src_reg); in check_cond_jmp_op()
16439 if (insn->src_reg != BPF_REG_0) { in check_cond_jmp_op()
16443 src_reg = &env->fake_reg[0]; in check_cond_jmp_op()
16444 memset(src_reg, 0, sizeof(*src_reg)); in check_cond_jmp_op()
16445 src_reg->type = SCALAR_VALUE; in check_cond_jmp_op()
16446 __mark_reg_known(src_reg, insn->imm); in check_cond_jmp_op()
16450 pred = is_branch_taken(dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
16458 !__is_pointer_value(false, src_reg)) in check_cond_jmp_op()
16459 err = mark_chain_precision(env, insn->src_reg); in check_cond_jmp_op()
16497 if (BPF_SRC(insn->code) == BPF_X && src_reg->type == SCALAR_VALUE && src_reg->id) in check_cond_jmp_op()
16498 collect_linked_regs(this_branch, src_reg->id, &linked_regs); in check_cond_jmp_op()
16516 &other_branch_regs[insn->src_reg], in check_cond_jmp_op()
16517 dst_reg, src_reg, opcode, is_jmp32); in check_cond_jmp_op()
16535 src_reg->type == SCALAR_VALUE && src_reg->id && in check_cond_jmp_op()
16536 !WARN_ON_ONCE(src_reg->id != other_branch_regs[insn->src_reg].id)) { in check_cond_jmp_op()
16537 sync_linked_regs(this_branch, src_reg, &linked_regs); in check_cond_jmp_op()
16538 sync_linked_regs(other_branch, &other_branch_regs[insn->src_reg], &linked_regs); in check_cond_jmp_op()
16559 __is_pointer_value(false, src_reg) && __is_pointer_value(false, dst_reg) && in check_cond_jmp_op()
16560 type_may_be_null(src_reg->type) != type_may_be_null(dst_reg->type) && in check_cond_jmp_op()
16561 base_type(src_reg->type) != PTR_TO_BTF_ID && in check_cond_jmp_op()
16576 if (type_may_be_null(src_reg->type)) in check_cond_jmp_op()
16577 mark_ptr_not_null_reg(&eq_branch_regs[insn->src_reg]); in check_cond_jmp_op()
16597 } else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
16632 if (insn->src_reg == 0) { in check_ld_imm()
16646 if (insn->src_reg == BPF_PSEUDO_BTF_ID) { in check_ld_imm()
16663 if (insn->src_reg == BPF_PSEUDO_FUNC) { in check_ld_imm()
16685 if (insn->src_reg == BPF_PSEUDO_MAP_VALUE || in check_ld_imm()
16686 insn->src_reg == BPF_PSEUDO_MAP_IDX_VALUE) { in check_ld_imm()
16695 } else if (insn->src_reg == BPF_PSEUDO_MAP_FD || in check_ld_imm()
16696 insn->src_reg == BPF_PSEUDO_MAP_IDX) { in check_ld_imm()
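The check_ld_imm() cluster shows how the two-slot ld_imm64 instruction (BPF_LD | BPF_IMM | BPF_DW) overloads src_reg to say what its 64-bit immediate means. A sketch using only the uapi BPF_PSEUDO_* constants from <linux/bpf.h>, mirroring the checks in check_ld_imm() and resolve_pseudo_ldimm64():

#include <linux/bpf.h>

static const char *ld_imm64_kind(const struct bpf_insn *insn)
{
        switch (insn->src_reg) {
        case 0:                         return "raw 64-bit constant";
        case BPF_PSEUDO_MAP_FD:         return "map by file descriptor";
        case BPF_PSEUDO_MAP_IDX:        return "map by fd-array index";
        case BPF_PSEUDO_MAP_VALUE:      return "pointer into a map value";
        case BPF_PSEUDO_MAP_IDX_VALUE:  return "map value by fd-array index";
        case BPF_PSEUDO_BTF_ID:         return "kernel object by BTF id";
        case BPF_PSEUDO_FUNC:           return "pointer to a BPF subprogram";
        default:                        return "invalid";
        }
}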
16752 (mode == BPF_ABS && insn->src_reg != BPF_REG_0)) { in check_ld_abs()
16778 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_ld_abs()
17374 ldx->src_reg != BPF_REG_10) in mark_fastcall_pattern_for_call()
17377 if (stx->src_reg != ldx->dst_reg) in mark_fastcall_pattern_for_call()
17380 if ((BIT(stx->src_reg) & expected_regs_mask) == 0) in mark_fastcall_pattern_for_call()
17388 expected_regs_mask &= ~BIT(stx->src_reg); in mark_fastcall_pattern_for_call()
17506 } else if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in visit_insn()
17533 return visit_func_call_insn(t, insns, env, insn->src_reg == BPF_PSEUDO_CALL); in visit_insn()
19529 insn->src_reg != BPF_REG_0) { in do_check()
19556 (insn->src_reg != BPF_PSEUDO_KFUNC_CALL in do_check()
19558 (insn->src_reg != BPF_REG_0 && in do_check()
19559 insn->src_reg != BPF_PSEUDO_CALL && in do_check()
19560 insn->src_reg != BPF_PSEUDO_KFUNC_CALL) || in do_check()
19568 if ((insn->src_reg == BPF_REG_0 && insn->imm != BPF_FUNC_spin_unlock) || in do_check()
19569 (insn->src_reg == BPF_PSEUDO_KFUNC_CALL && in do_check()
19575 if (insn->src_reg == BPF_PSEUDO_CALL) { in do_check()
19577 } else if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_check()
19592 insn->src_reg != BPF_REG_0 || in do_check()
19609 insn->src_reg != BPF_REG_0 || in do_check()
20106 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in resolve_pseudo_ldimm64()
20112 if (insn[0].src_reg == 0) in resolve_pseudo_ldimm64()
20116 if (insn[0].src_reg == BPF_PSEUDO_BTF_ID) { in resolve_pseudo_ldimm64()
20124 if (insn[0].src_reg == BPF_PSEUDO_FUNC) { in resolve_pseudo_ldimm64()
20133 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
20147 switch (insn[0].src_reg) { in resolve_pseudo_ldimm64()
20172 if (insn[0].src_reg == BPF_PSEUDO_MAP_FD || in resolve_pseudo_ldimm64()
20173 insn[0].src_reg == BPF_PSEUDO_MAP_IDX) { in resolve_pseudo_ldimm64()
20245 if (insn->src_reg == BPF_PSEUDO_FUNC) in convert_pseudo_ld_imm64()
20247 insn->src_reg = 0; in convert_pseudo_ld_imm64()
20730 zext_patch[1].src_reg = load_reg; in opt_subreg_zext_lo32_rnd_hi32()
21628 BPF_JNE | BPF_K, insn->src_reg, in do_misc_fixups()
21637 BPF_JEQ | BPF_K, insn->src_reg, in do_misc_fixups()
21648 BPF_MOV64_REG(BPF_REG_AX, insn->src_reg), in do_misc_fixups()
21671 BPF_MOV64_REG(BPF_REG_AX, insn->src_reg), in do_misc_fixups()
21720 *patch++ = BPF_MOV64_REG(BPF_REG_AX, insn->src_reg); in do_misc_fixups()
21779 off_reg = issrc ? insn->src_reg : insn->dst_reg; in do_misc_fixups()
21793 *patch++ = BPF_MOV64_REG(insn->dst_reg, insn->src_reg); in do_misc_fixups()
21794 insn->src_reg = BPF_REG_AX; in do_misc_fixups()
21881 if (insn->src_reg == BPF_PSEUDO_CALL) in do_misc_fixups()
21883 if (insn->src_reg == BPF_PSEUDO_KFUNC_CALL) { in do_misc_fixups()
22537 insn->src_reg == 0 && in is_bpf_loop_call()
23688 u16 src = BIT(insn->src_reg); in compute_insn_live_regs()
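The final hit, compute_insn_live_regs(), encodes the BPF register file as bits in a u16 so per-instruction use/def sets are single bitwise ops. A hypothetical standalone use-mask for a two-register ALU instruction, with the kernel's BIT() expanded:

#include <stdint.h>

static uint16_t alu_reg_use_mask(uint8_t dst_reg, uint8_t src_reg)
{
        /* one bit per register; for e.g. BPF_ADD | BPF_X both operands
         * are read, so both bits enter the use set */
        return (uint16_t)((1u << dst_reg) | (1u << src_reg));
}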