
Searched refs:sc_dev (Results 1 – 6 of 6) sorted by relevance

/linux-6.15/drivers/infiniband/hw/irdma/
hw.c
78 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_puda_ce_handler()
107 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_process_ceq()
215 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_process_aeq()
634 rf->sc_dev.irq_ops->irdma_cfg_aeq(&rf->sc_dev, rf->iw_msixtbl->idx, false); in irdma_destroy_aeq()
700 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev, in irdma_del_ceq_0()
710 rf->sc_dev.ceq_valid = false; in irdma_del_ceq_0()
733 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev, msix_vec->ceq_id, in irdma_del_ceqs()
1143 rf->sc_dev.irq_ops->irdma_cfg_ceq(&rf->sc_dev, ceq_id, msix_vec->idx, true); in irdma_cfg_ceq_vector()
1172 rf->sc_dev.irq_ops->irdma_cfg_aeq(&rf->sc_dev, msix_vec->idx, true); in irdma_cfg_aeq_vector()
1282 rf->sc_dev.ceq_valid = true; in irdma_setup_ceq_0()
[all …]
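A pattern worth noting in these hw.c hits: the shared device state (sc_dev) carries a table of function pointers (irq_ops), and the setup/teardown paths enable or disable CEQ/AEQ interrupt vectors by dispatching through that table, flipping ceq_valid alongside. The sketch below is a standalone, user-space illustration of that ops-table idiom under simplified, hypothetical types (fake_dev, fake_irq_ops and the stub callbacks are not part of the irdma driver); it only mirrors the shape of the calls seen above, not the real hardware programming.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical, simplified stand-ins for the sc_dev/irq_ops pairing. */
struct fake_dev;

struct fake_irq_ops {
    /* Mirrors the call shape seen in hw.c: (dev, id, vector, enable). */
    void (*cfg_ceq)(struct fake_dev *dev, unsigned int ceq_id,
                    unsigned int msix_idx, bool enable);
    void (*cfg_aeq)(struct fake_dev *dev, unsigned int msix_idx, bool enable);
};

struct fake_dev {
    const struct fake_irq_ops *irq_ops;
    bool ceq_valid;
};

/* Stub implementations; a real driver would program hardware here. */
static void stub_cfg_ceq(struct fake_dev *dev, unsigned int ceq_id,
                         unsigned int msix_idx, bool enable)
{
    (void)dev;
    printf("CEQ %u on MSI-X %u -> %s\n", ceq_id, msix_idx,
           enable ? "enabled" : "disabled");
}

static void stub_cfg_aeq(struct fake_dev *dev, unsigned int msix_idx, bool enable)
{
    (void)dev;
    printf("AEQ on MSI-X %u -> %s\n", msix_idx, enable ? "enabled" : "disabled");
}

static const struct fake_irq_ops ops = {
    .cfg_ceq = stub_cfg_ceq,
    .cfg_aeq = stub_cfg_aeq,
};

int main(void)
{
    struct fake_dev dev = { .irq_ops = &ops };

    /* Setup path: enable the vectors, then mark CEQs usable. */
    dev.irq_ops->cfg_ceq(&dev, 0, 1, true);
    dev.irq_ops->cfg_aeq(&dev, 2, true);
    dev.ceq_valid = true;

    /* Teardown path: disable and clear the flag, as the del_ceq hits do. */
    dev.irq_ops->cfg_ceq(&dev, 0, 1, false);
    dev.ceq_valid = false;
    return 0;
}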
verbs.c
27 irdma_fw_minor_ver(&rf->sc_dev); in irdma_query_device()
377 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_pd()
764 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_roce_fill_and_set_qpctx_info()
888 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_qp()
1792 dma_free_coherent(rf->sc_dev.hw->device, in irdma_cq_free_rsrc()
2053 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_create_cq()
3196 iwdev->rf->sc_dev.hw_attrs.page_size_cap, in irdma_rereg_mr_trans()
3443 dev = &iwqp->iwdev->rf->sc_dev; in irdma_post_send()
4010 irdma_fw_major_ver(&iwdev->rf->sc_dev), in irdma_get_dev_fw_str()
4011 irdma_fw_minor_ver(&iwdev->rf->sc_dev)); in irdma_get_dev_fw_str()
[all …]
main.c
98 irdma_log_invalid_mtu(l2params.mtu, &iwdev->rf->sc_dev); in irdma_iidc_event_handler()
125 pe_criterr = readl(iwdev->rf->sc_dev.hw_regs[IRDMA_GLPE_CRITERR]); in irdma_iidc_event_handler()
main.h
301 struct irdma_sc_dev sc_dev; member
412 return container_of(dev, struct irdma_pci_f, sc_dev); in dev_to_rf()
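main.h is where the relationship is defined: struct irdma_pci_f embeds an irdma_sc_dev as its sc_dev member, and dev_to_rf() recovers the containing irdma_pci_f from a struct irdma_sc_dev pointer with container_of(). The sketch below shows that embedded-member/back-pointer idiom with simplified, hypothetical types (sc_dev_like, pci_f_like); it is a standalone illustration of the pattern, not the driver's actual definitions.

#include <stddef.h>
#include <stdio.h>

/* container_of(): recover the enclosing struct from a pointer to one of
 * its members (same idea as the kernel macro). */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical stand-ins for irdma_sc_dev / irdma_pci_f. */
struct sc_dev_like {
    int hw_rev;
};

struct pci_f_like {
    int other_state;
    struct sc_dev_like sc_dev;  /* embedded, not a pointer */
};

/* Mirrors the shape of dev_to_rf() in main.h. */
static struct pci_f_like *dev_to_pci_f(struct sc_dev_like *dev)
{
    return container_of(dev, struct pci_f_like, sc_dev);
}

int main(void)
{
    struct pci_f_like rf = { .other_state = 42 };
    struct sc_dev_like *dev = &rf.sc_dev;  /* what call sites pass around */

    /* Round trip: from the embedded member back to its container. */
    printf("%d\n", dev_to_pci_f(dev)->other_state);  /* prints 42 */
    return 0;
}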
utils.c
530 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_cleanup_pending_cqp_op()
567 cqp_timeout.compl_cqp_cmds = atomic64_read(&rf->sc_dev.cqp->completed_ops); in irdma_wait_event()
575 irdma_check_cqp_progress(&cqp_timeout, &rf->sc_dev); in irdma_wait_event()
696 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_handle_cqp_op()
787 return &(container_of(dev, struct irdma_pci_f, sc_dev))->iwdev->ibdev; in to_ibdev()
1088 dma_free_coherent(rf->sc_dev.hw->device, iwqp->q2_ctx_mem.size, in irdma_free_qp_rsrc()
1091 dma_free_coherent(rf->sc_dev.hw->device, iwqp->kqp.dma_mem.size, in irdma_free_qp_rsrc()
1784 if (!rf->sc_dev.ceq_valid) in irdma_cqp_ws_node_cmd()
cm.c
2205 if (irdma_puda_create_ah(&iwdev->rf->sc_dev, &ah_info, wait, in irdma_cm_create_ah()
2223 irdma_puda_free_ah(&iwdev->rf->sc_dev, cm_node->ah); in irdma_cm_free_ah()
2285 cm_node->dev = &iwdev->rf->sc_dev; in irdma_make_cm_node()
3154 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) { in irdma_receive_ilq()
3266 cm_core->dev = &iwdev->rf->sc_dev; in irdma_setup_cm_core()
3624 dma_free_coherent(iwdev->rf->sc_dev.hw->device, in irdma_free_lsmm_rsrc()
3659 dev = &iwdev->rf->sc_dev; in irdma_accept()
4149 dev = &iwdev->rf->sc_dev; in irdma_cm_event_connected()