
Searched refs:p_dev (Results 1 – 25 of 32) sorted by relevance

/f-stack/dpdk/drivers/net/dpaa/fmlib/
fm_lib.c
41 t_device *p_dev; in fm_open() local
57 free(p_dev); in fm_open()
61 p_dev->id = id; in fm_open()
62 p_dev->fd = fd; in fm_open()
90 free(p_dev); in fm_close()
132 free(p_dev); in fm_pcd_open()
160 free(p_dev); in fm_pcd_close()
241 free(p_dev); in fm_pcd_net_env_characteristics_delete()
318 free(p_dev); in fm_pcd_kg_scheme_delete()
348 free(p_dev); in fm_port_open()
[all …]
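
The fm_lib.c hits above trace a simple lifecycle: fm_open() allocates a t_device, records the FM instance id and the open file descriptor in p_dev->id / p_dev->fd, and every error path plus fm_close() releases the object with free(p_dev). Below is a minimal sketch of that pattern; only the id and fd fields are visible in the hits, so the rest of the struct layout, the device-node name, and the function names are placeholder assumptions.

/* Minimal sketch of the open/close pattern suggested by the fm_lib.c hits.
 * Only id and fd appear in the hits; everything else is an assumption. */
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

typedef struct {
    uint32_t id;  /* p_dev->id = id in fm_open() */
    int      fd;  /* p_dev->fd = fd in fm_open() */
} t_device;

static t_device *fm_open_sketch(uint32_t id)
{
    char name[64];
    int fd;
    t_device *p_dev = calloc(1, sizeof(*p_dev));

    if (p_dev == NULL)
        return NULL;

    snprintf(name, sizeof(name), "/dev/fm%u", id);  /* assumed node name */
    fd = open(name, O_RDWR);
    if (fd < 0) {
        free(p_dev);        /* mirrors the error-path free(p_dev) in fm_open() */
        return NULL;
    }
    p_dev->id = id;
    p_dev->fd = fd;
    return p_dev;
}

static void fm_close_sketch(t_device *p_dev)
{
    close(p_dev->fd);
    free(p_dev);            /* mirrors free(p_dev) in fm_close() */
}
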
fm_vsp.c
26 t_device *p_dev = (t_device *)h_fm_port; in fm_port_vsp_alloc() local
44 t_device *p_dev = NULL; in fm_vsp_config() local
48 p_dev = p_fm_vsp_params->h_fm; in fm_vsp_config()
68 p_vsp_dev->h_user_priv = (t_handle)p_dev; in fm_vsp_config()
69 p_dev->owners++; in fm_vsp_config()
80 t_device *p_dev = NULL; in fm_vsp_init() local
86 p_dev = (t_device *)p_vsp_dev->h_user_priv; in fm_vsp_init()
102 t_device *p_dev = NULL; in fm_vsp_free() local
108 p_dev = (t_device *)p_vsp_dev->h_user_priv; in fm_vsp_free()
116 p_dev->owners--; in fm_vsp_free()
[all …]
/f-stack/dpdk/drivers/net/qede/base/
ecore_dev.c
81 (u8 *)p_dev->doorbells + p_dev->db_size) { in ecore_db_rec_sanity()
85 (u8 *)p_dev->doorbells + p_dev->db_size); in ecore_db_rec_sanity()
109 &p_dev->hwfns[0] : &p_dev->hwfns[1]; in ecore_db_rec_find_hwfn()
685 struct ecore_dev *p_dev = p_hwfn->p_dev; in __ecore_llh_set_engine_affin() local
740 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_llh_set_engine_affin() local
793 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_llh_hw_init_pf() local
1438 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_all_ppfids_wr() local
1581 p_hwfn->p_dev = p_dev; in ecore_init_struct()
1636 OSAL_FREE(p_dev, p_dev->fw_data); in ecore_resc_free()
1638 OSAL_FREE(p_dev, p_dev->reset_stats); in ecore_resc_free()
[all …]
ecore_dev_api.h
22 void ecore_init_dp(struct ecore_dev *p_dev,
40 void ecore_resc_free(struct ecore_dev *p_dev);
56 void ecore_resc_setup(struct ecore_dev *p_dev);
132 enum _ecore_status_t ecore_hw_init(struct ecore_dev *p_dev,
142 void ecore_hw_timers_stop_all(struct ecore_dev *p_dev);
172 void ecore_prepare_hibernate(struct ecore_dev *p_dev);
301 void ecore_hw_remove(struct ecore_dev *p_dev);
434 ecore_chain_alloc(struct ecore_dev *p_dev,
449 void ecore_chain_free(struct ecore_dev *p_dev,
499 u8 ecore_llh_get_num_ppfid(struct ecore_dev *p_dev);
[all …]
ecore_int.c
132 DP_INFO(p_hwfn->p_dev, in ecore_pswhst_attn_cb()
163 DP_INFO(p_hwfn->p_dev, in ecore_pswhst_attn_cb()
1296 DP_ERR(p_hwfn->p_dev, in ecore_int_sp_dpc()
1317 DP_ERR(p_hwfn->p_dev, in ecore_int_sp_dpc()
1329 DP_ERR(p_hwfn->p_dev, in ecore_int_sp_dpc()
1456 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_int_sb_attn_alloc() local
1473 OSAL_FREE(p_dev, p_sb); in ecore_int_sb_attn_alloc()
1492 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_init_cau_sb_entry() local
1544 if (IS_VF(p_hwfn->p_dev)) in _ecore_int_cau_conf_pi()
1773 sb_info->p_dev = p_hwfn->p_dev; in ecore_int_sb_init()
[all …]
ecore_init_ops.c
24 void ecore_init_iro_array(struct ecore_dev *p_dev) in ecore_init_iro_array() argument
26 p_dev->iro_arr = iro_arr + E4_IRO_ARR_OFFSET; in ecore_init_iro_array()
119 if (IS_VF(p_hwfn->p_dev)) in ecore_init_alloc()
130 OSAL_FREE(p_hwfn->p_dev, rt_data->b_valid); in ecore_init_alloc()
214 struct ecore_dev *p_dev = p_hwfn->p_dev; local
220 array_data = p_dev->fw_data->arr_data;
360 if (CHIP_REV_IS_EMUL(p_hwfn->p_dev))
421 struct ecore_dev *p_dev = p_hwfn->p_dev; local
474 struct ecore_dev *p_dev = p_hwfn->p_dev; local
481 init = p_dev->fw_data->init_ops;
[all …]
bcm_osal.c
127 void *osal_dma_alloc_coherent(struct ecore_dev *p_dev, in osal_dma_alloc_coherent() argument
136 DP_ERR(p_dev, "Memzone allocation count exceeds %u\n", in osal_dma_alloc_coherent()
151 DP_ERR(p_dev, "Unable to allocate DMA memory " in osal_dma_alloc_coherent()
159 DP_VERBOSE(p_dev, ECORE_MSG_SP, in osal_dma_alloc_coherent()
175 DP_ERR(p_dev, "Memzone allocation count exceeds %u\n", in osal_dma_alloc_coherent_aligned()
190 DP_ERR(p_dev, "Unable to allocate DMA memory " in osal_dma_alloc_coherent_aligned()
198 DP_VERBOSE(p_dev, ECORE_MSG_SP, in osal_dma_alloc_coherent_aligned()
211 DP_VERBOSE(p_dev, ECORE_MSG_SP, in osal_dma_free_mem()
223 DP_ERR(p_dev, "Unexpected memory free request\n"); in osal_dma_free_mem()
332 qede_hw_err_handler(p_hwfn->p_dev, err_type); in qede_hw_err_notify()
[all …]
ecore_spq.c
49 OSAL_SMP_WMB(p_hwfn->p_dev); in ecore_spq_blocking_cb()
70 OSAL_SMP_RMB(p_hwfn->p_dev); in __ecore_spq_block()
273 OSAL_WMB(p_hwfn->p_dev); in ecore_spq_hw_post()
278 OSAL_WMB(p_hwfn->p_dev); in ecore_spq_hw_post()
357 OSAL_MMIOWB(p_hwfn->p_dev); in ecore_eq_prod_update()
455 OSAL_FREE(p_hwfn->p_dev, p_eq); in ecore_eq_alloc()
483 if (IS_VF(p_hwfn->p_dev)) in ecore_cqe_completion()
625 OSAL_FREE(p_hwfn->p_dev, p_spq); in ecore_spq_alloc()
656 OSAL_FREE(p_hwfn->p_dev, p_spq); in ecore_spq_free()
918 OSAL_FREE(p_hwfn->p_dev, p_ent); in ecore_spq_post()
[all …]
ecore.h
134 ~((1 << (p_hwfn->p_dev->cache_shift)) - 1))
193 #define for_each_hwfn(p_dev, i) for (i = 0; i < p_dev->num_hwfns; i++) argument
586 struct ecore_dev *p_dev; member
836 (((_p_hwfn)->p_dev)->mf_mode == ECORE_MF_DEFAULT)
838 (((_p_hwfn)->p_dev)->mf_mode == ECORE_MF_NPAR)
840 (((_p_hwfn)->p_dev)->mf_mode == ECORE_MF_OVLAN)
894 #define IS_ECORE_SRIOV(p_dev) (!!(p_dev)->p_iov_info) argument
966 u32 ecore_get_hsi_def_val(struct ecore_dev *p_dev,
1072 ecore_device_num_ports((_p_hwfn)->p_dev))
1079 (ECORE_IS_BB((_p_hwfn)->p_dev) ? \
[all …]
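
ecore.h is where p_dev appears as the back-pointer member that every ecore_hwfn carries, and where for_each_hwfn() is defined as a plain loop over p_dev->num_hwfns. The sketch below shows how the other call sites (ecore_sriov.c, ecore_dev.c) walk the hardware functions; it assumes the hwfns[] array seen in ecore_db_rec_find_hwfn() and is illustrative only.

/* Illustrative only: walk the hardware functions of an ecore_dev with the
 * for_each_hwfn() macro defined in ecore.h. Assumes the hwfns[] array and
 * num_hwfns field seen in the ecore_dev.c / ecore_vf.c hits. */
static void walk_hwfns(struct ecore_dev *p_dev)
{
    int i;

    for_each_hwfn(p_dev, i) {
        struct ecore_hwfn *p_hwfn = &p_dev->hwfns[i];

        /* each hwfn keeps a back-pointer to its parent device,
         * set in ecore_init_struct(): p_hwfn->p_dev = p_dev */
        DP_INFO(p_hwfn->p_dev, "hwfn %d of %d\n", i, p_dev->num_hwfns);
    }
}
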
ecore_l2.c
52 if (IS_PF(p_hwfn->p_dev)) { in ecore_l2_alloc()
110 OSAL_VFREE(p_hwfn->p_dev, in ecore_l2_free()
232 if (IS_VF(p_hwfn->p_dev)) { in _ecore_eth_queue_to_cid()
452 if (IS_VF(p_hwfn->p_dev)) in ecore_sp_vport_start()
878 if (IS_VF(p_dev)) { in ecore_filter_accept_cmd()
1666 if (IS_VF(p_dev)) { in ecore_filter_mcast_cmd()
1694 if (IS_VF(p_dev)) { in ecore_filter_ucast_cmd()
2007 if (IS_PF(p_dev)) { in _ecore_get_vport_stats()
2035 if (!p_dev) { in ecore_get_vport_stats()
2081 if (IS_PF(p_dev)) in ecore_reset_vport_stats()
[all …]
ecore_mcp.c
1584 if (IS_VF(p_hwfn->p_dev)) in ecore_get_process_kill_counter()
1603 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_mcp_handle_process_kill() local
2260 if (IS_VF(p_hwfn->p_dev)) in ecore_mcp_get_mbi_ver()
2292 if (IS_VF(p_hwfn->p_dev)) in ecore_mcp_get_media_type()
2737 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_start_recovery_process() local
3214 OSAL_MEMCPY(p_buf, &p_dev->mcp_nvm_resp, sizeof(p_dev->mcp_nvm_resp)); in ecore_mcp_nvm_resp()
4459 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_mcp_get_engine_config() local
4479 p_dev->fir_affin = in ecore_mcp_get_engine_config()
4486 p_dev->l2_affin_hint = in ecore_mcp_get_engine_config()
4492 fir_valid, p_dev->fir_affin, l2_valid, p_dev->l2_affin_hint); in ecore_mcp_get_engine_config()
[all …]
ecore_iov_api.h
20 #define IS_VF(p_dev) ((p_dev)->b_is_vf) argument
21 #define IS_PF(p_dev) (!((p_dev)->b_is_vf)) argument
23 #define IS_PF_SRIOV(p_hwfn) (!!((p_hwfn)->p_dev->p_iov_info))
186 void ecore_iov_set_vfs_to_disable(struct ecore_dev *p_dev,
197 void ecore_iov_set_vf_to_disable(struct ecore_dev *p_dev,
717 enum _ecore_status_t ecore_iov_configure_min_tx_rate(struct ecore_dev *p_dev,
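
ecore_iov_api.h defines the guards that most of the hits in this result set rely on: IS_VF()/IS_PF() test p_dev->b_is_vf, and IS_PF_SRIOV() / IS_ECORE_SRIOV() test whether p_iov_info was allocated. A small sketch of the usual guard pattern follows; the checks mirror ecore_iov_hw_info() and _ecore_iov_pf_sanity_check(), while the function itself is hypothetical.

/* Sketch of the VF/PF guard pattern built on the macros above; the early
 * returns mirror ecore_iov_hw_info() and _ecore_iov_pf_sanity_check(),
 * the function name is hypothetical. */
static enum _ecore_status_t pf_only_config(struct ecore_hwfn *p_hwfn)
{
    /* a VF never owns this resource, so bail out quietly */
    if (IS_VF(p_hwfn->p_dev))
        return ECORE_SUCCESS;

    /* a PF without SR-IOV support has no p_iov_info allocated */
    if (!IS_ECORE_SRIOV(p_hwfn->p_dev))
        return ECORE_SUCCESS;

    /* ... PF + SR-IOV specific programming would go here ... */
    return ECORE_SUCCESS;
}
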
ecore_sriov.c
471 DP_NOTICE(p_dev, false, in ecore_iov_pci_cfg_info()
660 OSAL_FREE(p_dev, p_dev->p_iov_info); in ecore_iov_free_hw_info()
665 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_iov_hw_info() local
669 if (IS_VF(p_hwfn->p_dev)) in ecore_iov_hw_info()
682 p_dev->p_iov_info = OSAL_ZALLOC(p_dev, GFP_KERNEL, in ecore_iov_hw_info()
684 if (!p_dev->p_iov_info) { in ecore_iov_hw_info()
702 OSAL_FREE(p_dev, p_dev->p_iov_info); in ecore_iov_hw_info()
742 if (IS_VF(p_hwfn->p_dev) || !IS_ECORE_SRIOV(p_hwfn->p_dev) || in _ecore_iov_pf_sanity_check()
764 for_each_hwfn(p_dev, i) { in ecore_iov_set_vf_to_disable()
780 if (!IS_ECORE_SRIOV(p_dev)) in ecore_iov_set_vfs_to_disable()
[all …]
ecore_int_api.h
44 struct ecore_dev *p_dev; member
79 OSAL_MMIOWB(sb_info->p_dev); in ecore_sb_update_sb_idx()
118 OSAL_MMIOWB(sb_info->p_dev); in ecore_sb_ack()
119 OSAL_BARRIER(sb_info->p_dev); in ecore_sb_ack()
310 void ecore_int_disable_post_isr_release(struct ecore_dev *p_dev);
321 void ecore_int_attn_clr_enable(struct ecore_dev *p_dev, bool clr_enable);
ecore_hw.c
244 if (!p_hwfn->p_dev->chk_reg_fifo) in ecore_is_reg_fifo_empty()
252 if (CHIP_REV_IS_SLOW(p_hwfn->p_dev)) in ecore_is_reg_fifo_empty()
275 if (CHIP_REV_IS_SLOW(p_hwfn->p_dev)) in ecore_wr()
299 if (CHIP_REV_IS_SLOW(p_hwfn->p_dev)) in ecore_rd()
323 if (IS_PF(p_hwfn->p_dev)) { in ecore_memcpy_hw()
711 OSAL_BARRIER(p_hwfn->p_dev); in ecore_dmae_operation_wait()
715 DP_NOTICE(p_hwfn->p_dev, false, in ecore_dmae_operation_wait()
727 OSAL_BARRIER(p_hwfn->p_dev); in ecore_dmae_operation_wait()
790 OSAL_DMA_SYNC(p_hwfn->p_dev, in ecore_dmae_execute_sub_operation()
802 OSAL_DMA_SYNC(p_hwfn->p_dev, in ecore_dmae_execute_sub_operation()
[all …]
ecore_sp_commands.c
190 struct ecore_tunnel_info *p_tun = &p_hwfn->p_dev->tunnel; in ecore_tunn_set_pf_update_params()
236 if (ECORE_IS_BB_A0(p_hwfn->p_dev)) { in ecore_set_hw_tunn_mode_port()
258 struct ecore_tunnel_info *p_tun = &p_hwfn->p_dev->tunnel; in ecore_tunn_set_pf_start_params()
260 if (ECORE_IS_BB_A0(p_hwfn->p_dev)) { in ecore_tunn_set_pf_start_params()
338 if (OSAL_GET_BIT(ECORE_MF_OVLAN_CLSS, &p_hwfn->p_dev->mf_bits)) in ecore_sp_pf_start()
348 &p_hwfn->p_dev->mf_bits)) { in ecore_sp_pf_start()
382 &p_hwfn->p_dev->mf_bits)) in ecore_sp_pf_start()
395 if (p_hwfn->p_dev->p_iov_info) { in ecore_sp_pf_start()
416 &p_hwfn->p_dev->tunnel); in ecore_sp_pf_start()
565 if (IS_VF(p_hwfn->p_dev)) in ecore_sp_pf_update_tunn_cfg()
[all …]
ecore_vf.c
117 OSAL_WMB(p_hwfn->p_dev); in ecore_send_msg2pf()
196 OSAL_DMA_FREE_COHERENT(p_hwfn->p_dev, in _ecore_vf_pf_release()
201 OSAL_DMA_FREE_COHERENT(p_hwfn->p_dev, in _ecore_vf_pf_release()
208 OSAL_DMA_FREE_COHERENT(p_hwfn->p_dev, in _ecore_vf_pf_release()
292 struct ecore_dev *p_dev = p_hwfn->p_dev; in ecore_vf_pf_acquire() local
479 ECORE_IS_BB(p_dev) ? "BB" : "AH", in ecore_vf_pf_acquire()
488 p_dev->num_hwfns = 2; in ecore_vf_pf_acquire()
537 p_hwfn->p_dev->num_hwfns = 1; in ecore_vf_hw_prepare()
649 ECORE_IS_CMT(p_hwfn->p_dev)) { in ecore_vf_hw_prepare()
663 p_hwfn->p_dev->doorbells); in ecore_vf_hw_prepare()
[all …]
ecore_l2_api.h
168 ecore_filter_ucast_cmd(struct ecore_dev *p_dev,
175 ecore_filter_mcast_cmd(struct ecore_dev *p_dev,
183 struct ecore_dev *p_dev,
433 void ecore_get_vport_stats(struct ecore_dev *p_dev,
436 void ecore_reset_vport_stats(struct ecore_dev *p_dev);
ecore_cxt.c
700 OSAL_DMA_FREE_COHERENT(p_hwfn->p_dev, in ecore_cxt_src_t2_free()
705 OSAL_FREE(p_hwfn->p_dev, p_t2->dma_mem); in ecore_cxt_src_t2_free()
854 OSAL_DMA_FREE_COHERENT(p_hwfn->p_dev, in ecore_ilt_shadow_free()
859 OSAL_FREE(p_hwfn->p_dev, p_mngr->ilt_shadow); in ecore_ilt_shadow_free()
984 OSAL_FREE(p_hwfn->p_dev, in ecore_cid_map_free()
1122 if (p_hwfn->p_dev->p_iov_info) in ecore_cxt_mngr_alloc()
1137 max_num_vfs = NUM_OF_VFS(p_hwfn->p_dev); in ecore_cxt_mngr_alloc()
1512 if (p_hwfn->p_dev->p_iov_info) { in ecore_ilt_vf_bounds_init()
1732 (NUM_OF_VFS(p_hwfn->p_dev) + in ecore_tm_init_pf()
1978 &p_hwfn->p_dev->mf_bits)) in ecore_cxt_set_pf_params()
[all …]
ecore_mcp_api.h
783 enum _ecore_status_t ecore_recovery_prolog(struct ecore_dev *p_dev);
887 enum _ecore_status_t ecore_mcp_nvm_set_secure_mode(struct ecore_dev *p_dev,
901 enum _ecore_status_t ecore_mcp_phy_write(struct ecore_dev *p_dev, u32 cmd,
915 enum _ecore_status_t ecore_mcp_nvm_write(struct ecore_dev *p_dev, u32 cmd,
926 enum _ecore_status_t ecore_mcp_nvm_put_file_begin(struct ecore_dev *p_dev,
937 enum _ecore_status_t ecore_mcp_nvm_del_file(struct ecore_dev *p_dev,
948 enum _ecore_status_t ecore_mcp_nvm_resp(struct ecore_dev *p_dev, u8 *p_buf);
961 enum _ecore_status_t ecore_mcp_phy_read(struct ecore_dev *p_dev, u32 cmd,
974 enum _ecore_status_t ecore_mcp_nvm_read(struct ecore_dev *p_dev, u32 addr,
ecore_init_ops.h
18 void ecore_init_iro_array(struct ecore_dev *p_dev);
ecore_dcbx.c
151 if (OSAL_GET_BIT(ECORE_MF_UFP_SPECIFIC, &p_hwfn->p_dev->mf_bits)) in ecore_dcbx_set_params()
297 &p_hwfn->p_dev->mf_bits) && !eth_tlv) in ecore_dcbx_process_tlv()
928 p_hwfn->p_dcbx_info = OSAL_ZALLOC(p_hwfn->p_dev, GFP_KERNEL, in ecore_dcbx_info_alloc()
944 OSAL_FREE(p_hwfn->p_dev, p_hwfn->p_dcbx_info); in ecore_dcbx_info_free()
984 if (IS_VF(p_hwfn->p_dev)) in ecore_dcbx_query_params()
1287 dcbx_info = OSAL_ALLOC(p_hwfn->p_dev, GFP_KERNEL, in ecore_dcbx_get_config_params()
1296 OSAL_FREE(p_hwfn->p_dev, dcbx_info); in ecore_dcbx_get_config_params()
1321 OSAL_FREE(p_hwfn->p_dev, dcbx_info); in ecore_dcbx_get_config_params()
1565 p_dcbx_info = OSAL_ALLOC(p_hwfn->p_dev, GFP_KERNEL, in ecore_dcbx_get_dscp_priority()
1574 OSAL_FREE(p_hwfn->p_dev, p_dcbx_info); in ecore_dcbx_get_dscp_priority()
[all …]
/f-stack/dpdk/drivers/net/qede/
qede_logs.h
12 #define DP_ERR(p_dev, fmt, ...) \ argument
16 (p_dev)->name ? (p_dev)->name : "", \
19 #define DP_NOTICE(p_dev, is_assert, fmt, ...) \ argument
24 (p_dev)->name ? (p_dev)->name : "", \
30 (p_dev)->name ? (p_dev)->name : "", \
35 #define DP_INFO(p_dev, fmt, ...) \ argument
39 (p_dev)->name ? (p_dev)->name : "", \
42 #define DP_VERBOSE(p_dev, module, fmt, ...) \ argument
44 if ((p_dev)->dp_module & module) \
48 (p_dev)->name ? (p_dev)->name : "", \
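
qede_logs.h shows why p_dev is threaded through every layer: the DP_* macros prefix each message with (p_dev)->name and gate DP_VERBOSE on (p_dev)->dp_module. Below is a usage sketch modeled on the bcm_osal.c and ecore_sriov.c call sites; the wrapper function itself is hypothetical.

/* Usage sketch for the DP_* macros above, modeled on the bcm_osal.c and
 * ecore_sriov.c call sites; the wrapper function is hypothetical. */
static void report_alloc_failure(struct ecore_dev *p_dev, unsigned int limit)
{
    /* unconditional error, prefixed with (p_dev)->name by the macro */
    DP_ERR(p_dev, "Memzone allocation count exceeds %u\n", limit);

    /* notice form carries an extra "is_assert" flag, as in
     * ecore_iov_pci_cfg_info() */
    DP_NOTICE(p_dev, false, "continuing without DMA memory\n");

    /* emitted only when ECORE_MSG_SP is set in (p_dev)->dp_module */
    DP_VERBOSE(p_dev, ECORE_MSG_SP, "allocation limit is %u\n", limit);
}
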
qede_sriov.c
117 i, hwfn->p_dev->p_iov_info->first_vf_in_pf + i); in qed_handle_vf_msg()
197 struct ecore_hwfn *lead_hwfn = ECORE_LEADING_HWFN(hwfn->p_dev); in qed_inform_vf_link_state()
213 for (i = 0; i < hwfn->p_dev->p_iov_info->total_vfs; i++) { in qed_inform_vf_link_state()
/f-stack/dpdk/drivers/bus/vdev/
vdev.c
255 struct rte_vdev_device **p_dev, in insert_vdev() argument
294 if (p_dev) in insert_vdev()
295 *p_dev = dev; in insert_vdev()
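
The vdev.c hit is a different flavor of p_dev: an optional out-parameter of type struct rte_vdev_device **, filled only when the caller passes a non-NULL pointer (if (p_dev) *p_dev = dev;). A self-contained sketch of that idiom follows; only that shape comes from the hit, the type and function names here are hypothetical.

/* Self-contained sketch of the optional out-parameter idiom seen in
 * insert_vdev(); names are hypothetical, only the final check-and-assign
 * shape comes from the hit. */
#include <errno.h>
#include <stdlib.h>

struct my_dev {              /* stand-in for struct rte_vdev_device */
    const char *name;
};

static int create_dev(const char *name, struct my_dev **p_dev)
{
    struct my_dev *dev = calloc(1, sizeof(*dev));

    if (dev == NULL)
        return -ENOMEM;
    dev->name = name;        /* ... real code would register dev here ... */

    if (p_dev)               /* callers that don't need the handle pass NULL */
        *p_dev = dev;
    return 0;
}
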
