
Searched refs:bp (Results 1 – 25 of 67) sorted by relevance


/dpdk/drivers/net/bnxt/
bnxt_hwrm.h
74 bp->tx_cos_queue[x].profile = \
79 bp->rx_cos_queue[x].profile = \
165 int bnxt_hwrm_func_qcaps(struct bnxt *bp);
166 int bnxt_hwrm_func_reset(struct bnxt *bp);
180 int bnxt_hwrm_ring_alloc(struct bnxt *bp,
185 int bnxt_hwrm_ring_free(struct bnxt *bp,
275 int bnxt_hwrm_ptp_cfg(struct bnxt *bp);
284 int bnxt_alloc_ctx_mem(struct bnxt *bp);
290 int bnxt_hwrm_set_mac(struct bnxt *bp);
293 int bnxt_hwrm_fw_reset(struct bnxt *bp);
[all …]
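
All of the bnxt_hwrm.h prototypes above take the per-port private structure struct bnxt *bp as their first argument. A minimal sketch of that calling convention, built only from two of the prototypes listed here; the helper name and the call order are illustrative, not the driver's actual init sequence.

#include "bnxt.h"
#include "bnxt_hwrm.h"

/* Hypothetical helper: every HWRM wrapper operates on the per-port
 * private struct bnxt *bp; errors are simply passed back to the caller. */
static int example_qcaps_then_reset(struct bnxt *bp)
{
	int rc;

	rc = bnxt_hwrm_func_qcaps(bp);   /* query function capabilities */
	if (rc != 0)
		return rc;

	return bnxt_hwrm_func_reset(bp); /* reset the function over HWRM */
}
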
bnxt_ethdev.c
1026 if (BNXT_PF(bp) || BNXT_VF_IS_TRUSTED(bp)) { in bnxt_dev_info_get_op()
1102 bnxt_free_cp_ring(bp, bp->async_cp_ring); in bnxt_dev_configure_op()
1141 bp->rx_cp_nr_rings = bp->rx_nr_rings; in bnxt_dev_configure_op()
1142 bp->tx_cp_nr_rings = bp->tx_nr_rings; in bnxt_dev_configure_op()
1172 bp->max_tx_rings, bp->max_rx_rings, bp->max_cp_rings, in bnxt_dev_configure_op()
1173 bp->max_stat_ctx, bp->max_ring_grps, bp->max_vnics); in bnxt_dev_configure_op()
3152 bp = rxq->bp; in bnxt_rx_queue_count_op()
3213 struct bnxt *bp = rxq->bp; in bnxt_rx_descriptor_status_op() local
4685 if (!bp->bar0 || !bp->doorbell_base) { in bnxt_map_pci_bars()
5053 bp->mac_addr[0], bp->mac_addr[1], bp->mac_addr[2], in bnxt_setup_mac_addr()
[all …]
bnxt_hwrm.c
789 ptp->bp = bp; in bnxt_hwrm_ptp_qcfg()
904 bp->max_l2_ctx += bp->max_rx_em_flows; in __bnxt_hwrm_func_qcaps()
907 bp->max_l2_ctx, bp->max_vnics); in __bnxt_hwrm_func_qcaps()
1055 if ((BNXT_PF(bp) || BNXT_VF_IS_TRUSTED(bp)) && !BNXT_STINGRAY(bp)) in bnxt_hwrm_func_driver_register()
1091 if (BNXT_PF(bp) || BNXT_VF_IS_TRUSTED(bp)) in bnxt_hwrm_func_driver_register()
1213 bp->max_l2_ctx += bp->max_rx_em_flows; in bnxt_hwrm_func_resc_qcaps()
1673 bp->max_q = bp->max_tc; in bnxt_hwrm_queue_qportcfg()
3220 if (!BNXT_SINGLE_PF(bp) || BNXT_VF(bp)) in bnxt_set_hwrm_link_config()
4114 if (!(BNXT_PF(bp) && bp->pdev->max_vfs)) in bnxt_hwrm_func_buf_unrgtr()
4466 BNXT_NPAR(bp) || BNXT_MH(bp) || BNXT_TOTAL_VFS(bp)) in bnxt_hwrm_port_clr_stats()
[all …]
bnxt_cpr.c
23 if (!(bp->recovery_info && in bnxt_wait_for_device_shutdown()
59 if (!BNXT_TRUFLOW_EN(bp)) in bnxt_process_default_vnic_change()
70 if (!bp->rep_info) in bnxt_process_default_vnic_change()
112 struct bnxt *bp = arg; in bnxt_handle_vf_cfg_change() local
164 if (BNXT_VF(bp)) in bnxt_handle_async_event()
207 (void *)bp); in bnxt_handle_async_event()
210 info = bp->recovery_info; in bnxt_handle_async_event()
257 if (bp->recovery_info) in bnxt_handle_async_event()
298 fw_vf_id >= bp->pf->first_vf_id + bp->pf->active_vfs) { in bnxt_handle_fwd_req()
302 (bp->pf->first_vf_id) + bp->pf->active_vfs - 1, in bnxt_handle_fwd_req()
[all …]
rte_pmd_bnxt.c
45 struct bnxt *bp; in rte_pmd_bnxt_set_tx_loopback() local
59 if (!BNXT_PF(bp)) { in rte_pmd_bnxt_set_tx_loopback()
86 struct bnxt *bp; in rte_pmd_bnxt_set_all_queues_drop_en() local
113 rc = bnxt_hwrm_vnic_cfg(bp, &bp->vnic_info[i]); in rte_pmd_bnxt_set_all_queues_drop_en()
139 struct bnxt *bp; in rte_pmd_bnxt_set_vf_mac_addr() local
179 struct bnxt *bp; in rte_pmd_bnxt_set_vf_rate_limit() local
236 struct bnxt *bp; in rte_pmd_bnxt_set_vf_mac_anti_spoof() local
295 struct bnxt *bp; in rte_pmd_bnxt_set_vf_vlan_anti_spoof() local
355 struct bnxt *bp; in rte_pmd_bnxt_set_vf_vlan_stripq() local
399 struct bnxt *bp; in rte_pmd_bnxt_set_vf_rxmode() local
[all …]
bnxt_irq.c
29 if (bp == NULL) in bnxt_int_handler()
31 cpr = bp->async_cp_ring; in bnxt_int_handler()
60 if (BNXT_HAS_NQ(bp)) in bnxt_int_handler()
88 bp->eth_dev); in bnxt_free_int()
103 rte_free(bp->irq_tbl); in bnxt_free_int()
104 bp->irq_tbl = NULL; in bnxt_free_int()
116 if (is_bnxt_in_error(bp)) in bnxt_disable_int()
123 if (BNXT_HAS_NQ(bp)) in bnxt_disable_int()
140 if (BNXT_HAS_NQ(bp)) in bnxt_enable_int()
156 if (bp->irq_tbl) { in bnxt_setup_int()
[all …]
bnxt_ring.c
60 if (BNXT_CHIP_P5(bp)) { in bnxt_alloc_ring_grps()
62 } else if (bp->max_ring_grps < bp->rx_cp_nr_rings) { in bnxt_alloc_ring_grps()
72 if (!bp->grp_info) { in bnxt_alloc_ring_grps()
77 bnxt_init_ring_grps(bp); in bnxt_alloc_ring_grps()
417 if (BNXT_HAS_NQ(bp)) { in bnxt_alloc_cmpl_ring()
447 if (!BNXT_HAS_NQ(bp) || bp->rxtx_nq_ring) in bnxt_alloc_rxtx_nq_ring()
475 rc = bnxt_alloc_rings(bp, bp->eth_dev->device->numa_node, 0, NULL, in bnxt_alloc_rxtx_nq_ring()
783 if (BNXT_HAS_NQ(bp)) in bnxt_alloc_async_cp_ring()
798 if (BNXT_HAS_NQ(bp)) in bnxt_alloc_async_cp_ring()
814 if (BNXT_HAS_NQ(bp)) in bnxt_free_async_cp_ring()
[all …]
bnxt_filter.c
67 max_filters = bp->max_l2_ctx; in bnxt_init_filters()
70 filter = &bp->filter_info[i]; in bnxt_init_filters()
90 if (bp->vnic_info == NULL) in bnxt_free_all_filters()
94 vnic = &bp->vnic_info[i]; in bnxt_free_all_filters()
117 if (bp->filter_info == NULL) in bnxt_free_filter_mem()
121 max_filters = bp->max_l2_ctx; in bnxt_free_filter_mem()
150 rte_free(bp->filter_info); in bnxt_free_filter_mem()
151 bp->filter_info = NULL; in bnxt_free_filter_mem()
167 max_filters = bp->max_l2_ctx; in bnxt_alloc_filter_mem()
177 bp->filter_info = filter_mem; in bnxt_alloc_filter_mem()
[all …]
bnxt_rxq.c
79 bp->nr_vnics = 0; in bnxt_mq_rx_configure()
104 pools = bp->rx_cosq_cnt ? bp->rx_cosq_cnt : 1; in bnxt_mq_rx_configure()
113 pools = bp->rx_cosq_cnt ? bp->rx_cosq_cnt : pools; in bnxt_mq_rx_configure()
124 vnic = &bp->vnic_info[i]; in bnxt_mq_rx_configure()
131 bp->nr_vnics++; in bnxt_mq_rx_configure()
180 vnic = &bp->vnic_info[i]; in bnxt_mq_rx_configure()
231 if (rxq->bp == NULL || in bnxt_rx_queue_release_mbufs()
268 rxq = bp->rx_queues[i]; in bnxt_free_rx_mbufs()
330 rc = is_bnxt_in_error(bp); in bnxt_rx_queue_setup_op()
357 rxq->bp = bp; in bnxt_rx_queue_setup_op()
[all …]
bnxt_vnic.c
44 max_vnics = bp->max_vnics; in bnxt_init_vnics()
47 vnic = &bp->vnic_info[i]; in bnxt_init_vnics()
81 if (bp->vnic_info == NULL) in bnxt_free_all_vnics()
85 vnic = &bp->vnic_info[i]; in bnxt_free_all_vnics()
96 if (bp->vnic_info == NULL) in bnxt_free_vnic_attributes()
100 vnic = &bp->vnic_info[i]; in bnxt_free_vnic_attributes()
123 if (BNXT_CHIP_P5(bp)) in bnxt_alloc_vnic_attributes()
132 vnic = &bp->vnic_info[i]; in bnxt_alloc_vnic_attributes()
189 rte_free(bp->vnic_info); in bnxt_free_vnic_mem()
190 bp->vnic_info = NULL; in bnxt_free_vnic_mem()
[all …]
bnxt.h
240 #define BNXT_MAX_VFS(bp) ((bp)->pf->max_vfs) argument
246 #define BNXT_TOTAL_VFS(bp) ((bp)->pf->total_vfs) argument
248 #define BNXT_PF_RINGS_USED(bp) bnxt_get_num_queues(bp) argument
249 #define BNXT_PF_RINGS_AVAIL(bp) ((bp)->pf->max_cp_rings - \ argument
672 #define BNXT_PF(bp) (!((bp)->flags & BNXT_FLAG_VF)) argument
673 #define BNXT_VF(bp) ((bp)->flags & BNXT_FLAG_VF) argument
674 #define BNXT_NPAR(bp) ((bp)->flags & BNXT_FLAG_NPAR_PF) argument
676 #define BNXT_SINGLE_PF(bp) (BNXT_PF(bp) && !BNXT_NPAR(bp) && !BNXT_MH(bp)) argument
682 #define BNXT_HAS_NQ(bp) BNXT_CHIP_P5(bp) argument
683 #define BNXT_HAS_RING_GRPS(bp) (!BNXT_CHIP_P5(bp)) argument
[all …]
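
The bnxt.h hits above are the predicate macros the rest of the driver applies to bp; the BNXT_PF(bp) || BNXT_VF_IS_TRUSTED(bp) tests in bnxt_ethdev.c and bnxt_hwrm.c are typical callers. A minimal sketch of how such a check reads; the function name is hypothetical.

#include <stdbool.h>
#include "bnxt.h"

/* Hypothetical predicate: a PF, or a VF that the PF marked as trusted,
 * may perform privileged configuration. BNXT_PF() and BNXT_VF_IS_TRUSTED()
 * are plain flag tests on bp defined in bnxt.h. */
static bool example_is_privileged(struct bnxt *bp)
{
	return BNXT_PF(bp) || BNXT_VF_IS_TRUSTED(bp);
}
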
bnxt_flow.c
947 if (bp->nr_vnics > bp->max_vnics - 1) in bnxt_vnic_prep()
1004 bp->nr_vnics++; in bnxt_vnic_prep()
1786 bp->nr_vnics--; in bnxt_flow_validate()
1900 !(bp->flags & BNXT_FLAG_FC_THREAD) && BNXT_FLOW_XSTATS_EN(bp)) { in bnxt_setup_flow_counter()
1903 (void *)bp); in bnxt_setup_flow_counter()
1937 (void *)bp); in bnxt_flow_cnt_alarm_cb()
1956 if (BNXT_VF(bp) && !BNXT_VF_IS_TRUSTED(bp)) { in bnxt_flow_create()
2166 tun_dst_fid + bp->first_vf_id, bp->fw_fid); in bnxt_handle_tunnel_redirect_destroy()
2171 if (bp->fw_fid != (tun_dst_fid + bp->first_vf_id)) { in bnxt_handle_tunnel_redirect_destroy()
2237 bnxt_set_rx_mask_no_vlan(bp, &bp->vnic_info[0]); in _bnxt_flow_destroy()
[all …]
bnxt_stats.c
556 rc = is_bnxt_in_error(bp); in bnxt_stats_get_op()
638 bnxt_clear_prev_stat(bp); in bnxt_stats_reset_op()
689 rc = is_bnxt_in_error(bp); in bnxt_dev_xstats_get_op()
698 bnxt_flow_stats_cnt(bp); in bnxt_dev_xstats_get_op()
735 bnxt_hwrm_port_qstats(bp); in bnxt_dev_xstats_get_op()
825 if (i > bp->max_l2_ctx) in bnxt_dev_xstats_get_op()
861 rc = is_bnxt_in_error(bp); in bnxt_dev_xstats_get_names_op()
936 if (BNXT_VF(bp) || !BNXT_SINGLE_PF(bp) || in bnxt_dev_xstats_reset_op()
947 bnxt_clear_prev_stat(bp); in bnxt_dev_xstats_reset_op()
1040 vnic = &bp->vnic_info[i]; in bnxt_flow_stats_req()
[all …]
bnxt_txq.c
31 if (bp->fw_cap & BNXT_FW_CAP_VLAN_TX_INSERT) in bnxt_get_tx_port_offloads()
34 if (BNXT_TUNNELED_OFFLOADS_CAP_ALL_EN(bp)) in bnxt_get_tx_port_offloads()
39 if (BNXT_TUNNELED_OFFLOADS_CAP_GRE_EN(bp)) in bnxt_get_tx_port_offloads()
41 if (BNXT_TUNNELED_OFFLOADS_CAP_NGE_EN(bp)) in bnxt_get_tx_port_offloads()
74 void bnxt_free_tx_mbufs(struct bnxt *bp) in bnxt_free_tx_mbufs() argument
80 txq = bp->tx_queues[i]; in bnxt_free_tx_mbufs()
90 if (is_bnxt_in_error(txq->bp)) in bnxt_tx_queue_release_op()
129 rc = is_bnxt_in_error(bp); in bnxt_tx_queue_setup_op()
133 if (queue_idx >= bnxt_max_rings(bp)) { in bnxt_tx_queue_setup_op()
136 queue_idx, bp->max_tx_rings); in bnxt_tx_queue_setup_op()
[all …]
bnxt_vnic.h
59 int bnxt_free_vnic(struct bnxt *bp, struct bnxt_vnic_info *vnic,
61 struct bnxt_vnic_info *bnxt_alloc_vnic(struct bnxt *bp);
62 void bnxt_free_all_vnics(struct bnxt *bp);
63 void bnxt_free_vnic_attributes(struct bnxt *bp);
64 int bnxt_alloc_vnic_attributes(struct bnxt *bp, bool reconfig);
65 void bnxt_free_vnic_mem(struct bnxt *bp);
66 int bnxt_alloc_vnic_mem(struct bnxt *bp);
67 int bnxt_vnic_grp_alloc(struct bnxt *bp, struct bnxt_vnic_info *vnic);
70 int bnxt_rte_to_hwrm_hash_level(struct bnxt *bp, uint64_t hash_f, uint32_t lvl);
71 uint64_t bnxt_hwrm_to_rte_rss_level(struct bnxt *bp, uint32_t mode);
bnxt_filter.h
11 #define bnxt_vlan_filter_exists(bp, filter, chk, vlan_id) \ argument
15 !memcmp((filter)->l2_addr, (bp)->mac_addr, \
96 struct bnxt_filter_info *bnxt_alloc_filter(struct bnxt *bp);
97 struct bnxt_filter_info *bnxt_alloc_vf_filter(struct bnxt *bp, uint16_t vf);
98 void bnxt_free_all_filters(struct bnxt *bp);
99 void bnxt_free_filter_mem(struct bnxt *bp);
100 int bnxt_alloc_filter_mem(struct bnxt *bp);
101 struct bnxt_filter_info *bnxt_get_unused_filter(struct bnxt *bp);
102 void bnxt_free_filter(struct bnxt *bp, struct bnxt_filter_info *filter);
103 struct bnxt_filter_info *bnxt_get_l2_filter(struct bnxt *bp,
bnxt_rxr.c
138 struct bnxt *bp = arg; in bnxt_rx_ring_reset() local
146 rxq = bp->rx_queues[i]; in bnxt_rx_ring_reset()
398 if (BNXT_CHIP_P5(bp)) in bnxt_discard_rx()
433 if (BNXT_CHIP_P5(rxq->bp)) { in bnxt_tpa_end()
734 if (BNXT_GFID_ENABLED(bp)) in bnxt_ulp_set_mark_in_mbuf()
856 struct bnxt *bp = rxq->bp; in bnxt_rx_pkt() local
950 if (BNXT_TRUFLOW_EN(bp)) in bnxt_rx_pkt()
995 if (BNXT_TRUFLOW_EN(bp) && (BNXT_VF_IS_TRUSTED(bp) || BNXT_PF(bp)) && in bnxt_rx_pkt()
1155 if (!bp->rx_queues) in bnxt_free_rx_rings()
1159 rxq = bp->rx_queues[i]; in bnxt_free_rx_rings()
[all …]
bnxt_ring.h
69 int bnxt_alloc_ring_grps(struct bnxt *bp);
70 int bnxt_alloc_rings(struct bnxt *bp, unsigned int socket_id, uint16_t qidx,
76 int bnxt_alloc_hwrm_rx_ring(struct bnxt *bp, int queue_index);
77 int bnxt_alloc_hwrm_rings(struct bnxt *bp);
78 int bnxt_alloc_async_cp_ring(struct bnxt *bp);
79 void bnxt_free_async_cp_ring(struct bnxt *bp);
80 int bnxt_alloc_async_ring_struct(struct bnxt *bp);
81 int bnxt_alloc_rxtx_nq_ring(struct bnxt *bp);
82 void bnxt_free_rxtx_nq_ring(struct bnxt *bp);
bnxt_irq.h
17 int bnxt_free_int(struct bnxt *bp);
18 void bnxt_disable_int(struct bnxt *bp);
19 void bnxt_enable_int(struct bnxt *bp);
20 int bnxt_setup_int(struct bnxt *bp);
21 int bnxt_request_int(struct bnxt *bp);
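
Read together, the bnxt_irq.h prototypes suggest the interrupt lifecycle the driver drives through bp: set up the IRQ table, request the interrupt, enable it, and later disable and free it. The sketch below is inferred from the names only; the real driver wires these calls into its start/stop and uninit paths.

#include "bnxt.h"
#include "bnxt_irq.h"

/* Hypothetical start/stop pair built from the prototypes listed above. */
static int example_irq_start(struct bnxt *bp)
{
	int rc;

	rc = bnxt_setup_int(bp);    /* build bp->irq_tbl */
	if (rc != 0)
		return rc;

	rc = bnxt_request_int(bp);  /* register the handler */
	if (rc != 0)
		return rc;

	bnxt_enable_int(bp);
	return 0;
}

static void example_irq_stop(struct bnxt *bp)
{
	bnxt_disable_int(bp);
	bnxt_free_int(bp);          /* unregister and release bp->irq_tbl */
}
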
/dpdk/drivers/net/bnxt/tf_ulp/
bnxt_ulp.c
465 parms.bp = bp; in ulp_ctx_shared_session_open()
587 params.bp = bp; in ulp_ctx_session_open()
868 bnxt_ulp_cntxt_tfp_set(bp->ulp_ctx, &bp->tfp); in ulp_ctx_init()
985 bnxt_ulp_cntxt_tfp_set(bp->ulp_ctx, &bp->tfp); in ulp_ctx_attach()
1052 if (!bp) in ulp_session_init()
1185 if (!bp->ulp_ctx || !bp->ulp_ctx->cfg_data) in bnxt_ulp_destroy_vfr_default_rules()
1221 if (!bp->ulp_ctx || !bp->ulp_ctx->cfg_data) in bnxt_ulp_deinit()
1241 ulp_eem_tbl_scope_deinit(bp, bp->ulp_ctx); in bnxt_ulp_deinit()
1303 rc = ulp_dparms_init(bp, bp->ulp_ctx); in bnxt_ulp_init()
1410 if (!BNXT_PF(bp) && !BNXT_VF_IS_TRUSTED(bp)) { in bnxt_ulp_port_init()
[all …]
bnxt_tf_pmd_shim.c
23 struct bnxt *bp; in bnxt_pmd_get_bp() local
43 return bp; in bnxt_pmd_get_bp()
58 if (bp == NULL) { in bnxt_rss_config_action_apply()
173 struct bnxt *bp; in bnxt_pmd_get_svif() local
189 return func_svif ? bp->func_svif : bp->port_svif; in bnxt_pmd_get_svif()
197 struct bnxt *bp; in bnxt_pmd_get_iface_mac() local
215 struct bnxt *bp; in bnxt_pmd_get_parent_vnic_id() local
231 struct bnxt *bp; in bnxt_pmd_get_vnic_id() local
256 struct bnxt *bp; in bnxt_pmd_get_fw_func_id() local
314 return BNXT_PF(bp) ? bp->pf->port_id : bp->parent->port_id; in bnxt_pmd_get_phy_port_id()
[all …]
ulp_def_rules.c
446 if (!BNXT_TRUFLOW_EN(bp) || in bnxt_ulp_destroy_df_rules()
450 if (!bp->ulp_ctx || !bp->ulp_ctx->cfg_data) in bnxt_ulp_destroy_df_rules()
455 port_id = bp->eth_dev->data->port_id; in bnxt_ulp_destroy_df_rules()
511 if (!BNXT_TRUFLOW_EN(bp) || in bnxt_ulp_create_df_rules()
512 BNXT_ETH_DEV_IS_REPRESENTOR(bp->eth_dev) || !bp->ulp_ctx) in bnxt_ulp_create_df_rules()
515 port_id = bp->eth_dev->data->port_id; in bnxt_ulp_create_df_rules()
517 rc = bnxt_create_port_app_df_rule(bp, in bnxt_ulp_create_df_rules()
528 &bp->tx_cfa_action); in bnxt_ulp_create_df_rules()
530 bp->tx_cfa_action = 0; in bnxt_ulp_create_df_rules()
568 if (!bp || !BNXT_TRUFLOW_EN(bp)) in bnxt_ulp_create_vfr_default_rules()
[all …]
/dpdk/drivers/net/bnxt/tf_core/
tfp.c
31 tfp_send_msg_direct(struct bnxt *bp, in tfp_send_msg_direct() argument
132 struct bnxt *bp = NULL; in tfp_get_fid() local
137 bp = (struct bnxt *)tfp->bp; in tfp_get_fid()
138 if (bp == NULL) in tfp_get_fid()
141 *fw_fid = bp->fw_fid; in tfp_get_fid()
149 struct bnxt *bp = NULL; in tfp_get_pf() local
154 bp = (struct bnxt *)tfp->bp; in tfp_get_pf()
155 if (BNXT_VF(bp) && bp->parent) { in tfp_get_pf()
156 *pf = bp->parent->fid - 1; in tfp_get_pf()
158 } else if (BNXT_PF(bp)) { in tfp_get_pf()
[all …]
/dpdk/drivers/mempool/dpaa/
dpaa_mempool.c
45 struct bman_pool *bp; in dpaa_mbuf_create_pool() local
65 bp = bman_new_pool(&params); in dpaa_mbuf_create_pool()
66 if (!bp) { in dpaa_mbuf_create_pool()
70 bpid = bman_get_params(bp)->bpid; in dpaa_mbuf_create_pool()
78 ret = bman_acquire(bp, bufs, 8, 0); in dpaa_mbuf_create_pool()
80 ret = bman_acquire(bp, bufs, 1, 0); in dpaa_mbuf_create_pool()
93 bman_free_pool(bp); in dpaa_mbuf_create_pool()
101 rte_dpaa_bpid_info[bpid].bp = bp; in dpaa_mbuf_create_pool()
113 bman_free_pool(bp); in dpaa_mbuf_create_pool()
133 bman_free_pool(bp_info->bp); in dpaa_mbuf_free_pool()
[all …]
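
The dpaa_mempool.c hits show the bman pool lifecycle driven through bp: bman_new_pool() creates the pool, bman_get_params() reads back its bpid, bman_acquire() drains any stale buffers, and bman_free_pool() releases it. A condensed sketch of the drain-then-free pattern, assuming the DPAA bus header fsl_bman.h; buffer contents are ignored.

#include <fsl_bman.h>

/* Hypothetical teardown helper: pull buffers out of the pool in batches
 * of 8, then singly, until it is empty, and release the pool. */
static void example_drain_and_free(struct bman_pool *bp)
{
	struct bm_buffer bufs[8];

	while (bman_acquire(bp, bufs, 8, 0) > 0)
		;
	while (bman_acquire(bp, bufs, 1, 0) > 0)
		;

	bman_free_pool(bp);
}
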
/dpdk/drivers/net/i40e/base/
i40e_hmc.c
74 i40e_memcpy(&sd_entry->u.bp.addr, in i40e_add_sd_table_entry()
77 sd_entry->u.bp.sd_pd_index = sd_index; in i40e_add_sd_table_entry()
87 I40E_INC_BP_REFCNT(&sd_entry->u.bp); in i40e_add_sd_table_entry()
156 i40e_memcpy(&pd_entry->bp.addr, page, in i40e_add_pd_table_entry()
158 pd_entry->bp.sd_pd_index = pd_index; in i40e_add_pd_table_entry()
159 pd_entry->bp.entry_type = I40E_SD_TYPE_PAGED; in i40e_add_pd_table_entry()
174 I40E_INC_BP_REFCNT(&pd_entry->bp); in i40e_add_pd_table_entry()
222 I40E_DEC_BP_REFCNT(&pd_entry->bp); in i40e_remove_pd_bp()
223 if (pd_entry->bp.ref_cnt) in i40e_remove_pd_bp()
258 I40E_DEC_BP_REFCNT(&sd_entry->u.bp); in i40e_prep_remove_sd_bp()
[all …]
