Searched refs:nb_q_per_pool (Results 1 – 7 of 7) sorted by relevance

/f-stack/dpdk/drivers/net/txgbe/
txgbe_pf.c
116 RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool = nb_queue; in txgbe_pf_host_init()
141 RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool = 0; in txgbe_pf_host_uninit()
377 uint8_t nb_q_per_pool; in txgbe_vf_reset_msg() local
389 nb_q_per_pool = RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool; in txgbe_vf_reset_msg()
390 for (i = vf * nb_q_per_pool; i < (vf + 1) * nb_q_per_pool; i++) { in txgbe_vf_reset_msg()
584 uint32_t default_q = vf * RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool; in txgbe_get_vf_queues()
606 msgbuf[TXGBE_VF_RX_QUEUES] = RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool; in txgbe_get_vf_queues()
607 msgbuf[TXGBE_VF_TX_QUEUES] = RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool; in txgbe_get_vf_queues()
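
Both txgbe_vf_reset_msg() and txgbe_get_vf_queues() derive a VF's queue range purely from the VF index and nb_q_per_pool: VF n owns queues n * nb_q_per_pool through (n + 1) * nb_q_per_pool - 1. A minimal standalone sketch of that arithmetic (illustrative only; the helper name and the printf demonstration are not part of the driver):

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative helper: mirrors the indexing seen in the hits above, where
     * VF n owns the contiguous block [n * nb_q_per_pool, (n + 1) * nb_q_per_pool). */
    static void vf_queue_range(uint16_t vf, uint8_t nb_q_per_pool,
                               uint32_t *first_q, uint32_t *last_q)
    {
        *first_q = (uint32_t)vf * nb_q_per_pool;            /* default_q in txgbe_get_vf_queues() */
        *last_q  = (uint32_t)(vf + 1) * nb_q_per_pool - 1;  /* last queue touched in txgbe_vf_reset_msg() */
    }

    int main(void)
    {
        uint32_t first, last;

        vf_queue_range(2, 4, &first, &last);  /* 4 queues per pool: VF 2 -> queues 8..11 */
        printf("VF 2 owns queues %u..%u\n", first, last);
        return 0;
    }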
txgbe_ethdev.c
1106 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool = in txgbe_check_vf_rss_rxq_num()
1109 pci_dev->max_vfs * RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in txgbe_check_vf_rss_rxq_num()
1135 if (nb_rx_q <= RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool) in txgbe_check_mq_mode()
1170 if ((nb_rx_q > RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool) || in txgbe_check_mq_mode()
1171 (nb_tx_q > RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool)) { in txgbe_check_mq_mode()
1176 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool); in txgbe_check_mq_mode()
1304 uint8_t nb_q_per_pool; in txgbe_set_vf_rate_limit() local
1328 nb_q_per_pool = RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in txgbe_set_vf_rate_limit()
1331 queue_end = queue_idx + nb_q_per_pool - 1; in txgbe_set_vf_rate_limit()
1348 for (idx = 0; idx < nb_q_per_pool; idx++) { in txgbe_set_vf_rate_limit()
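
txgbe_check_vf_rss_rxq_num() and txgbe_check_mq_mode() use the same field for validation: the per-port Rx/Tx queue counts must fit inside one pool, and all pools together must fit in the hardware queue space (max_vfs * nb_q_per_pool). A hedged sketch of that check with illustrative names (sriov_queue_config_ok and max_hw_queues are not driver symbols):

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative check modelled on txgbe_check_mq_mode()/txgbe_check_vf_rss_rxq_num():
     * each SR-IOV pool is limited to nb_q_per_pool queues, so the configured queue
     * counts must fit in one pool and the pools must fit in the device's queue space. */
    static bool sriov_queue_config_ok(uint16_t nb_rx_q, uint16_t nb_tx_q,
                                      uint8_t nb_q_per_pool,
                                      uint16_t max_vfs, uint16_t max_hw_queues)
    {
        if (nb_rx_q > nb_q_per_pool || nb_tx_q > nb_q_per_pool)
            return false;  /* more queues requested than one pool can hold */
        if ((uint32_t)max_vfs * nb_q_per_pool > max_hw_queues)
            return false;  /* pools would exceed the hardware queue count */
        return true;
    }

    int main(void)
    {
        /* 2 Rx/Tx queues per port with 4-queue pools and 16 VFs on a 128-queue NIC. */
        return sriov_queue_config_ok(2, 2, 4, 16, 128) ? 0 : 1;
    }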
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_pf.c
121 RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool = nb_queue; in ixgbe_pf_host_init()
145 RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool = 0; in ixgbe_pf_host_uninit()
387 uint8_t nb_q_per_pool; in ixgbe_vf_reset_msg() local
399 nb_q_per_pool = RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_vf_reset_msg()
400 for (i = vf * nb_q_per_pool; i < (vf + 1) * nb_q_per_pool; i++) { in ixgbe_vf_reset_msg()
625 uint32_t default_q = vf * RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_get_vf_queues()
651 msgbuf[IXGBE_VF_RX_QUEUES] = RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_get_vf_queues()
652 msgbuf[IXGBE_VF_TX_QUEUES] = RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_get_vf_queues()
ixgbe_ethdev.c
2207 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool = in ixgbe_check_vf_rss_rxq_num()
2210 pci_dev->max_vfs * RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_check_vf_rss_rxq_num()
2237 if (nb_rx_q <= RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool) in ixgbe_check_mq_mode()
2270 if ((nb_rx_q > RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool) || in ixgbe_check_mq_mode()
2271 (nb_tx_q > RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool)) { in ixgbe_check_mq_mode()
2276 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool); in ixgbe_check_mq_mode()
2427 uint8_t nb_q_per_pool; in ixgbe_set_vf_rate_limit() local
2451 nb_q_per_pool = RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool; in ixgbe_set_vf_rate_limit()
2454 queue_end = queue_idx + nb_q_per_pool - 1; in ixgbe_set_vf_rate_limit()
2471 for (idx = 0; idx < nb_q_per_pool; idx++) { in ixgbe_set_vf_rate_limit()
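
The rate-limit paths in both drivers (txgbe_set_vf_rate_limit() above and ixgbe_set_vf_rate_limit() here) apply a VF's limit to its whole block of nb_q_per_pool queues, from queue_idx to queue_end. A standalone sketch of those loop bounds (the printf stands in for the per-queue register writes done by the real drivers):

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative only: shows the loop bounds used by the set_vf_rate_limit hits;
     * the real drivers program per-queue rate registers inside this loop. */
    static void apply_vf_rate_limit(uint16_t vf, uint8_t nb_q_per_pool, uint16_t rate_mbps)
    {
        uint32_t queue_idx = (uint32_t)vf * nb_q_per_pool;   /* first queue of the VF's pool */
        uint32_t queue_end = queue_idx + nb_q_per_pool - 1;  /* last queue of the VF's pool */

        for (uint32_t q = queue_idx; q <= queue_end; q++)
            printf("queue %u <- %u Mb/s\n", q, rate_mbps);   /* stand-in for the register write */
    }

    int main(void)
    {
        apply_vf_rate_limit(1, 2, 100);  /* VF 1 with 2 queues per pool -> queues 2..3 */
        return 0;
    }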
/f-stack/dpdk/drivers/net/e1000/
igb_pf.c
92 RTE_ETH_DEV_SRIOV(eth_dev).nb_q_per_pool = nb_queue; in igb_pf_host_init()
114 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool = 0; in igb_pf_host_uninit()
igb_ethdev.c
1102 RTE_ETH_DEV_SRIOV(dev).nb_q_per_pool = 1; in igb_check_mq_mode()
/f-stack/dpdk/lib/librte_ethdev/
rte_ethdev.h
1788 uint8_t nb_q_per_pool; /**< rx queue number per pool */ member
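
The single hit in rte_ethdev.h is the field definition itself. For context, a hedged reconstruction of the surrounding declaration and the accessor macro used by every driver hit above; the exact field list and header location vary between DPDK releases, so treat this as an approximation rather than the authoritative definition:

    #include <stdint.h>

    /* Approximate sketch of the enclosing structure; not copied from the hit,
     * and the exact members may differ by DPDK release. */
    struct rte_eth_dev_sriov {
        uint8_t  active;          /**< non-zero when SR-IOV pools are in use */
        uint8_t  nb_q_per_pool;   /**< rx queue number per pool (the member found above) */
        uint16_t def_vmdq_idx;    /**< default pool index used by the PF */
        uint16_t def_pool_q_idx;  /**< first queue index of the default pool */
    };

    /* Accessor seen in every hit: the SR-IOV state hangs off the port's data area. */
    #define RTE_ETH_DEV_SRIOV(dev)  ((dev)->data->sriov)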