
Searched refs:nb_rx_queues (Results 1 – 25 of 159) sorted by relevance


/f-stack/dpdk/drivers/net/ring/
rte_eth_ring.c  29 const unsigned int nb_rx_queues; member
182 i < dev->data->nb_rx_queues; i++) { in eth_stats_get()
205 for (i = 0; i < dev->data->nb_rx_queues; i++) in eth_stats_reset()
288 const unsigned int nb_rx_queues, in do_eth_dev_ring_create() argument
346 internals->max_rx_queues = nb_rx_queues; in do_eth_dev_ring_create()
348 for (i = 0; i < nb_rx_queues; i++) { in do_eth_dev_ring_create()
358 data->nb_rx_queues = (uint16_t)nb_rx_queues; in do_eth_dev_ring_create()
388 const unsigned int nb_rx_queues, in rte_eth_from_rings() argument
395 .nb_rx_queues = nb_rx_queues, in rte_eth_from_rings()
407 if (rx_queues == NULL && nb_rx_queues > 0) { in rte_eth_from_rings()
[all …]
rte_eth_ring.h  34 const unsigned nb_rx_queues,
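
Note: most hits in this listing share one idiom: an ethdev callback walks every configured RX queue, bounded by dev->data->nb_rx_queues. A minimal sketch of that idiom follows; it is not code from the tree, and the pmd_rx_queue/pmd_stats_get names are hypothetical.

#include <stdint.h>
#include <rte_ethdev.h>

/* Hypothetical per-queue counters a PMD might keep. */
struct pmd_rx_queue {
	uint64_t rx_pkts;
	uint64_t rx_bytes;
};

/* Stats callback sketch: visit each RX queue the application configured. */
static int
pmd_stats_get(struct rte_eth_dev *dev, struct rte_eth_stats *stats)
{
	uint16_t i;

	for (i = 0; i < dev->data->nb_rx_queues; i++) {
		struct pmd_rx_queue *rxq = dev->data->rx_queues[i];

		if (rxq == NULL)
			continue;
		stats->ipackets += rxq->rx_pkts;
		stats->ibytes += rxq->rx_bytes;
		if (i < RTE_ETHDEV_QUEUE_STAT_CNTRS) {
			stats->q_ipackets[i] = rxq->rx_pkts;
			stats->q_ibytes[i] = rxq->rx_bytes;
		}
	}
	return 0;
}

The eth_stats_get()/eth_stats_reset() hits in rte_eth_ring.c, rte_eth_pcap.c and rte_eth_vhost.c below are instances of this loop; hns3_stats.c additionally clamps the per-queue part with RTE_ETHDEV_QUEUE_STAT_CNTRS.
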
/f-stack/dpdk/drivers/net/failsafe/
failsafe_intr.c  294 if (dev->data->nb_rx_queues > fsdev->data->nb_rx_queues) { in failsafe_eth_rx_intr_ctl_subdevice()
299 for (qid = 0; qid < dev->data->nb_rx_queues; qid++) { in failsafe_eth_rx_intr_ctl_subdevice()
340 for (qid = 0; qid < ETH(sdev)->data->nb_rx_queues; qid++) { in failsafe_rx_intr_install_subdevice()
371 for (qid = 0; qid < ETH(sdev)->data->nb_rx_queues; qid++) { in failsafe_rx_intr_uninstall_subdevice()
372 if (qid < fsdev->data->nb_rx_queues) { in failsafe_rx_intr_uninstall_subdevice()
439 rxqs_n = priv->data->nb_rx_queues; in fs_rx_intr_vec_install()
/f-stack/dpdk/lib/librte_eventdev/
rte_event_eth_rx_adapter.c  574 uint16_t nb_rx_queues; in rxa_calc_wrr_sequence() local
577 nb_rx_queues = dev_info->dev->data->nb_rx_queues; in rxa_calc_wrr_sequence()
1657 uint16_t nb_rx_queues; in rxa_sw_del() local
1660 nb_rx_queues = dev_info->dev->data->nb_rx_queues; in rxa_sw_del()
1661 for (i = 0; i < nb_rx_queues; i++) in rxa_sw_del()
1691 uint16_t nb_rx_queues; in rxa_add_queue() local
1694 nb_rx_queues = dev_info->dev->data->nb_rx_queues; in rxa_add_queue()
1695 for (i = 0; i < nb_rx_queues; i++) in rxa_add_queue()
1755 uint16_t nb_rx_queues; in rxa_sw_add() local
1772 nb_rx_queues = dev_info->dev->data->nb_rx_queues; in rxa_sw_add()
[all …]
/f-stack/dpdk/drivers/net/nfb/
nfb_ethdev.c  120 uint16_t nb_rx = dev->data->nb_rx_queues; in nfb_eth_dev_start()
158 uint16_t nb_rx = dev->data->nb_rx_queues; in nfb_eth_dev_stop()
201 dev_info->max_rx_queues = dev->data->nb_rx_queues; in nfb_eth_dev_info()
221 uint16_t nb_rx = dev->data->nb_rx_queues; in nfb_eth_dev_close()
237 dev->data->nb_rx_queues = 0; in nfb_eth_dev_close()
476 data->nb_rx_queues = ndp_get_rx_queue_available_count(internals->nfb); in nfb_eth_dev_init()
480 data->nb_rx_queues, data->nb_tx_queues); in nfb_eth_dev_init()
nfb_stats.c  15 uint16_t nb_rx = dev->data->nb_rx_queues; in nfb_eth_stats_get()
59 uint16_t nb_rx = dev->data->nb_rx_queues; in nfb_eth_stats_reset()
/f-stack/dpdk/drivers/net/ice/
ice_dcf_ethdev.c  103 for (i = 0; i < dev->data->nb_rx_queues; i++) { in ice_dcf_init_rx_queues()
152 dev->data->nb_rx_queues * sizeof(int), 0); in ice_dcf_config_rx_queues_irqs()
155 dev->data->nb_rx_queues); in ice_dcf_config_rx_queues_irqs()
190 for (i = 0; i < dev->data->nb_rx_queues; i++) in ice_dcf_config_rx_queues_irqs()
196 for (i = 0; i < dev->data->nb_rx_queues; i++) { in ice_dcf_config_rx_queues_irqs()
211 for (i = 0; i < dev->data->nb_rx_queues; i++) { in ice_dcf_config_rx_queues_irqs()
219 hw->nb_msix, dev->data->nb_rx_queues); in ice_dcf_config_rx_queues_irqs()
276 if (rx_queue_id >= dev->data->nb_rx_queues) in ice_dcf_rx_queue_start()
381 if (rx_queue_id >= dev->data->nb_rx_queues) in ice_dcf_rx_queue_stop()
509 hw->num_queue_pairs = RTE_MAX(dev->data->nb_rx_queues, in ice_dcf_dev_start()
[all …]
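
Note: the ice_dcf_config_rx_queues_irqs(), sfc_intr_start() and iavf_config_rx_queues_irqs() hits all size RX interrupt resources by the queue count: one event fd per RX queue and a queue-to-vector map of nb_rx_queues ints. A rough sketch of that pattern, assuming the pre-21.11 public struct rte_intr_handle used by this DPDK snapshot; the pmd_ names are hypothetical.

#include <errno.h>
#include <rte_ethdev.h>
#include <rte_interrupts.h>
#include <rte_malloc.h>

/* Sketch: size RX interrupt resources by the number of configured RX queues. */
static int
pmd_rx_intr_setup(struct rte_eth_dev *dev, struct rte_intr_handle *intr_handle)
{
	uint16_t nb_rx = dev->data->nb_rx_queues;

	/* One event fd per RX queue. */
	if (rte_intr_efd_enable(intr_handle, nb_rx) != 0)
		return -1;

	/* Queue-index -> interrupt-vector map, one slot per RX queue. */
	intr_handle->intr_vec = rte_zmalloc("pmd_intr_vec",
					    nb_rx * sizeof(int), 0);
	if (intr_handle->intr_vec == NULL) {
		rte_intr_efd_disable(intr_handle);
		return -ENOMEM;
	}
	return 0;
}
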
/f-stack/dpdk/drivers/net/sfc/
sfc_rx.c  1614 while (--sw_index >= (int)nb_rx_queues) { in sfc_rx_fini_queues()
1619 sas->rxq_count = nb_rx_queues; in sfc_rx_fini_queues()
1636 const unsigned int nb_rx_queues = sa->eth_dev->data->nb_rx_queues; in sfc_rx_configure() local
1640 nb_rx_queues, sas->rxq_count); in sfc_rx_configure()
1646 if (nb_rx_queues == sas->rxq_count) in sfc_rx_configure()
1669 if (nb_rx_queues < sas->rxq_count) in sfc_rx_configure()
1670 sfc_rx_fini_queues(sa, nb_rx_queues); in sfc_rx_configure()
1687 if (nb_rx_queues > sas->rxq_count) { in sfc_rx_configure()
1689 (nb_rx_queues - sas->rxq_count) * in sfc_rx_configure()
1692 (nb_rx_queues - sas->rxq_count) * in sfc_rx_configure()
[all …]
sfc_intr.c  168 intr_vector = sa->eth_dev->data->nb_rx_queues; in sfc_intr_start()
176 sa->eth_dev->data->nb_rx_queues, sizeof(int), in sfc_intr_start()
181 sa->eth_dev->data->nb_rx_queues); in sfc_intr_start()
sfc_ev.h  82 return 1 + sa->eth_dev->data->nb_rx_queues + txq_sw_index; in sfc_evq_index_by_txq_sw_index()
/f-stack/dpdk/drivers/net/mlx4/
mlx4_rxq.c  192 if (id < dev->data->nb_rx_queues) in mlx4_rss_attach()
334 uint8_t log2_range = rte_log2_u32(dev->data->nb_rx_queues); in mlx4_rss_init()
342 if (ETH_DEV(priv)->data->nb_rx_queues > priv->hw_rss_max_qps) { in mlx4_rss_init()
356 1 << log2_range, dev->data->nb_rx_queues, strerror(ret)); in mlx4_rss_init()
360 for (i = 0; i != ETH_DEV(priv)->data->nb_rx_queues; ++i) { in mlx4_rss_init()
461 for (i = 0; i != ETH_DEV(priv)->data->nb_rx_queues; ++i) { in mlx4_rss_deinit()
762 if (idx >= dev->data->nb_rx_queues) { in mlx4_rx_queue_setup()
765 (void *)dev, idx, dev->data->nb_rx_queues); in mlx4_rx_queue_setup()
926 for (i = 0; i != ETH_DEV(priv)->data->nb_rx_queues; ++i) in mlx4_rx_queue_release()
/f-stack/dpdk/drivers/net/thunderx/
nicvf_ethdev.h  130 *rx_end = dev->data->nb_rx_queues ? in nicvf_rx_range()
131 RTE_MIN(tmp, dev->data->nb_rx_queues - 1) : 0; in nicvf_rx_range()
/f-stack/dpdk/drivers/net/pcap/
rte_eth_pcap.c  583 for (i = 0; i < dev->data->nb_rx_queues; i++) { in eth_dev_start()
599 for (i = 0; i < dev->data->nb_rx_queues; i++) in eth_dev_start()
642 for (i = 0; i < dev->data->nb_rx_queues; i++) { in eth_dev_stop()
650 for (i = 0; i < dev->data->nb_rx_queues; i++) in eth_dev_stop()
676 dev_info->max_rx_queues = dev->data->nb_rx_queues; in eth_dev_info()
693 i < dev->data->nb_rx_queues; i++) { in eth_stats_get()
724 for (i = 0; i < dev->data->nb_rx_queues; i++) { in eth_stats_reset()
754 for (i = 0; i < dev->data->nb_rx_queues; i++) { in eth_dev_close()
1114 const unsigned int nb_rx_queues, in pmd_init_internals() argument
1160 data->nb_rx_queues = (uint16_t)nb_rx_queues; in pmd_init_internals()
[all …]
/f-stack/dpdk/drivers/net/hns3/
hns3_stats.c  518 num = RTE_MIN(RTE_ETHDEV_QUEUE_STAT_CNTRS, eth_dev->data->nb_rx_queues); in hns3_stats_get()
585 for (i = 0; i != eth_dev->data->nb_rx_queues; ++i) { in hns3_stats_reset()
639 int bderr_stats = dev->data->nb_rx_queues * HNS3_NUM_RX_BD_ERROR_XSTATS; in hns3_xstats_calc_num()
641 int rx_queue_stats = dev->data->nb_rx_queues * HNS3_NUM_RX_QUEUE_STATS; in hns3_xstats_calc_num()
662 for (j = 0; j < dev->data->nb_rx_queues; j++) { in hns3_get_queue_stats()
773 for (j = 0; j < dev->data->nb_rx_queues; j++) { in hns3_dev_xstats_get()
859 for (j = 0; j < dev->data->nb_rx_queues; j++) { in hns3_dev_xstats_get_names()
879 for (j = 0; j < dev->data->nb_rx_queues; j++) { in hns3_dev_xstats_get_names()
/f-stack/dpdk/drivers/net/atlantic/
atl_rxtx.c  349 for (i = 0; i < eth_dev->data->nb_rx_queues; i++) { in atl_rx_init()
387 (eth_dev->data->nb_rx_queues - 1); in atl_rx_init()
451 if (rx_queue_id < dev->data->nb_rx_queues) { in atl_rx_queue_start()
483 if (rx_queue_id < dev->data->nb_rx_queues) { in atl_rx_queue_stop()
592 for (i = 0; i < dev->data->nb_rx_queues; i++) { in atl_free_queues()
596 dev->data->nb_rx_queues = 0; in atl_free_queues()
621 for (i = 0; i < dev->data->nb_rx_queues; i++) { in atl_start_queues()
649 for (i = 0; i < dev->data->nb_rx_queues; i++) { in atl_stop_queues()
698 if (rx_queue_id >= dev->data->nb_rx_queues) { in atl_rx_queue_count()
772 if (queue_id >= dev->data->nb_rx_queues) { in atl_rx_enable_intr()
/f-stack/dpdk/drivers/net/dpaa/
dpaa_flow.c  340 dpaa_intf->nb_rx_queues; in set_scheme_params()
789 if (!dev->data->nb_rx_queues) in dpaa_fm_config()
792 if (dev->data->nb_rx_queues & (dev->data->nb_rx_queues - 1)) { in dpaa_fm_config()
797 dpaa_intf->nb_rx_queues = dev->data->nb_rx_queues; in dpaa_fm_config()
807 for (i = 0; i < dpaa_intf->nb_rx_queues; i++) in dpaa_fm_config()
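
Note: dpaa_fm_config() above rejects a queue count that is not a power of two with the classic n & (n - 1) test, and vmxnet3_dev_configure() further down does the same job with rte_is_power_of_2(); hardware RSS distribution generally needs a power-of-two table. A hedged sketch of that configure-time check; pmd_check_rxq_count is a hypothetical name.

#include <errno.h>
#include <rte_common.h>   /* rte_is_power_of_2() */
#include <rte_ethdev.h>

/* Sketch: validate the configured RX queue count at dev_configure time. */
static int
pmd_check_rxq_count(struct rte_eth_dev *dev, uint16_t max_rx_queues)
{
	uint16_t n = dev->data->nb_rx_queues;

	if (n == 0 || n > max_rx_queues)
		return -EINVAL;
	/* Equivalent to the (n & (n - 1)) test used in dpaa_flow.c. */
	if (!rte_is_power_of_2(n))
		return -EINVAL;
	return 0;
}
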
/f-stack/dpdk/drivers/net/iavf/
iavf_ethdev.c  254 nb_q = RTE_MIN(adapter->eth_dev->data->nb_rx_queues, in iavf_init_rss()
460 for (i = 0; i < dev->data->nb_rx_queues; i++) { in iavf_init_queues()
496 dev->data->nb_rx_queues * sizeof(int), 0); in iavf_config_rx_queues_irqs()
499 dev->data->nb_rx_queues); in iavf_config_rx_queues_irqs()
508 dev->data->nb_rx_queues); in iavf_config_rx_queues_irqs()
551 for (i = 0; i < dev->data->nb_rx_queues; i++) { in iavf_config_rx_queues_irqs()
560 for (i = 0; i < dev->data->nb_rx_queues; i++) { in iavf_config_rx_queues_irqs()
577 for (i = 0; i < dev->data->nb_rx_queues; i++) { in iavf_config_rx_queues_irqs()
587 vf->nb_msix, dev->data->nb_rx_queues); in iavf_config_rx_queues_irqs()
597 uint16_t num_qv_maps = dev->data->nb_rx_queues; in iavf_config_rx_queues_irqs()
[all …]
/f-stack/dpdk/lib/librte_ethdev/
ethdev_profile.c  59 return vtune_profile_rx_init(port_id, dev->data->nb_rx_queues); in __rte_eth_dev_profile_init()
/f-stack/dpdk/drivers/net/octeontx2/
otx2_ethdev_irq.c  205 for (q = 0; q < eth_dev->data->nb_rx_queues; q++) { in nix_lf_q_irq()
217 for (q = 0; q < eth_dev->data->nb_rx_queues; q++) { in nix_lf_q_irq()
271 rqs = RTE_MIN(dev->qints, eth_dev->data->nb_rx_queues); in oxt2_nix_register_queue_irqs()
340 eth_dev->data->nb_rx_queues); in oxt2_nix_register_cq_irqs()
/f-stack/dpdk/drivers/net/mlx5/
mlx5_ethdev.c  64 unsigned int rxqs_n = dev->data->nb_rx_queues; in mlx5_dev_configure()
140 unsigned int rxqs_n = dev->data->nb_rx_queues; in mlx5_dev_configure_rss_reta()
227 if (dev->data->nb_rx_queues > 2 || in mlx5_set_default_params()
236 if (dev->data->nb_rx_queues > 2 || in mlx5_set_default_params()
/f-stack/dpdk/drivers/net/null/
rte_eth_null.c  223 if (rx_queue_id >= dev->data->nb_rx_queues) in eth_rx_queue_setup()
314 RTE_MIN(dev->data->nb_rx_queues, in eth_stats_get()
502 const unsigned int nb_rx_queues = 1; in eth_dev_null_create() local
547 data->nb_rx_queues = (uint16_t)nb_rx_queues; in eth_dev_null_create()
/f-stack/dpdk/drivers/net/vhost/
rte_eth_vhost.c  236 for (i = 0; i < dev->data->nb_rx_queues; i++) { in vhost_dev_xstats_reset()
293 for (i = 0; i < dev->data->nb_rx_queues; i++) { in vhost_dev_xstats_get()
657 int nb_rxq = dev->data->nb_rx_queues; in eth_vhost_install_intr()
733 for (i = 0; i < dev->data->nb_rx_queues; i++) { in update_queuing_status()
758 for (i = 0; i < eth_dev->data->nb_rx_queues; i++) { in queue_setup()
864 for (i = 0; i < eth_dev->data->nb_rx_queues; i++) { in destroy_device()
907 if (rx_idx >= 0 && rx_idx < eth_dev->data->nb_rx_queues && in vring_conf_update()
1194 for (i = 0; i < dev->data->nb_rx_queues; i++) in eth_dev_close()
1292 i < dev->data->nb_rx_queues; i++) { in eth_stats_get()
1329 for (i = 0; i < dev->data->nb_rx_queues; i++) { in eth_stats_reset()
[all …]
/f-stack/dpdk/drivers/net/netvsc/
hn_ethdev.c  586 hv->num_queues = RTE_MAX(dev->data->nb_rx_queues, in hn_dev_configure()
590 hv->rss_ind[i] = i % dev->data->nb_rx_queues; in hn_dev_configure()
646 for (i = 0; i < dev->data->nb_rx_queues; i++) { in hn_dev_stats_get()
682 for (i = 0; i < dev->data->nb_rx_queues; i++) { in hn_dev_stats_reset()
712 count += dev->data->nb_rx_queues * RTE_DIM(hn_stat_strings); in hn_dev_xstats_count()
748 for (i = 0; i < dev->data->nb_rx_queues; i++) { in hn_dev_xstats_get_names()
801 for (i = 0; i < dev->data->nb_rx_queues; i++) { in hn_dev_xstats_get()
/f-stack/dpdk/drivers/net/liquidio/
lio_ethdev.c  322 for (i = 0; i < eth_dev->data->nb_rx_queues; i++) { in lio_dev_stats_get()
360 for (i = 0; i < eth_dev->data->nb_rx_queues; i++) { in lio_dev_stats_reset()
1108 q_idx = (uint8_t)((eth_dev->data->nb_rx_queues > 1) ? in lio_dev_rss_configure()
1109 i % eth_dev->data->nb_rx_queues : 0); in lio_dev_rss_configure()
1174 if (q_no >= lio_dev->nb_rx_queues) { in lio_dev_rx_queue_setup()
1712 if (lio_dev->nb_rx_queues != num_rxq || in lio_reconf_queues()
1716 lio_dev->nb_rx_queues = num_rxq; in lio_reconf_queues()
1759 eth_dev->data->nb_rx_queues); in lio_dev_configure()
1761 lio_dev->nb_rx_queues = eth_dev->data->nb_rx_queues; in lio_dev_configure()
1765 lio_dev->max_rx_queues = eth_dev->data->nb_rx_queues; in lio_dev_configure()
[all …]
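
Note: lio_dev_rss_configure() above and hn_dev_configure() in the netvsc group both fill an RSS indirection table by taking the entry index modulo nb_rx_queues, spreading traffic round-robin across however many RX queues the application configured. A small sketch of that pattern; the table size and pmd_fill_reta name are illustrative only.

#include <stdint.h>
#include <rte_ethdev.h>

#define PMD_RSS_RETA_SIZE 128   /* illustrative indirection table size */

/* Sketch: spread RSS indirection entries round-robin over the RX queues. */
static void
pmd_fill_reta(struct rte_eth_dev *dev, uint16_t *reta)
{
	uint16_t nb_rx = dev->data->nb_rx_queues;
	uint16_t i;

	for (i = 0; i < PMD_RSS_RETA_SIZE; i++)
		reta[i] = nb_rx ? i % nb_rx : 0;
}
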
/f-stack/dpdk/drivers/net/vmxnet3/
vmxnet3_ethdev.c  422 dev->data->nb_rx_queues > VMXNET3_MAX_RX_QUEUES) { in vmxnet3_dev_configure()
427 if (!rte_is_power_of_2(dev->data->nb_rx_queues)) { in vmxnet3_dev_configure()
432 size = dev->data->nb_rx_queues * sizeof(struct Vmxnet3_TxQueueDesc) + in vmxnet3_dev_configure()
438 hw->num_rx_queues = (uint8_t)dev->data->nb_rx_queues; in vmxnet3_dev_configure()
884 for (i = 0; i < dev->data->nb_rx_queues; i++) { in vmxnet3_free_queues()
889 dev->data->nb_rx_queues = 0; in vmxnet3_free_queues()
1021 dev->data->nb_rx_queues * RTE_DIM(vmxnet3_rxq_stat_strings); in vmxnet3_dev_xstats_get_names()
1026 for (i = 0; i < dev->data->nb_rx_queues; i++) { in vmxnet3_dev_xstats_get_names()
1062 dev->data->nb_rx_queues * RTE_DIM(vmxnet3_rxq_stat_strings); in vmxnet3_dev_xstats_get()
1067 for (i = 0; i < dev->data->nb_rx_queues; i++) { in vmxnet3_dev_xstats_get()
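
Note: every dev->data->nb_rx_queues read above ultimately reflects the queue count the application passed to rte_eth_dev_configure(); the per-queue pointers in dev->data->rx_queues[] are then filled by rte_eth_rx_queue_setup(). A minimal application-side sketch, assuming an already-probed port and an existing mbuf pool; TX queue setup and rte_eth_dev_start() are omitted.

#include <rte_ethdev.h>
#include <rte_mempool.h>

/* Sketch: the queue count passed here is what PMDs later see as
 * dev->data->nb_rx_queues. */
static int
port_setup_rx(uint16_t port_id, uint16_t nb_rx, struct rte_mempool *mb_pool)
{
	struct rte_eth_conf conf = { 0 };
	uint16_t q;
	int ret;

	ret = rte_eth_dev_configure(port_id, nb_rx, 1, &conf);
	if (ret != 0)
		return ret;

	for (q = 0; q < nb_rx; q++) {
		ret = rte_eth_rx_queue_setup(port_id, q, 512,
				rte_eth_dev_socket_id(port_id), NULL, mb_pool);
		if (ret != 0)
			return ret;
	}
	return 0;
}
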
