
Search results for refs:tx_queues (results 1 – 25 of 117), sorted by relevance.


/f-stack/dpdk/drivers/net/ring/
rte_eth_ring.c
30 struct rte_ring * const *tx_queues; member
152 dev->data->tx_queues[tx_queue_id] = &internals->tx_ring_queues[tx_queue_id]; in eth_tx_queue_setup()
289 struct rte_ring *const tx_queues[], in do_eth_dev_ring_create() argument
343 data->tx_queues = tx_queues_local; in do_eth_dev_ring_create()
353 internals->tx_ring_queues[i].rng = tx_queues[i]; in do_eth_dev_ring_create()
354 data->tx_queues[i] = &internals->tx_ring_queues[i]; in do_eth_dev_ring_create()
389 struct rte_ring *const tx_queues[], in rte_eth_from_rings() argument
396 .tx_queues = tx_queues, in rte_eth_from_rings()
411 if (tx_queues == NULL && nb_tx_queues > 0) { in rte_eth_from_rings()
656 internal_args->tx_queues, in rte_pmd_ring_probe()
rte_eth_ring.h
35 struct rte_ring *const tx_queues[],
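
The ring PMD hits trace how caller-supplied rings become a port's TX queues: rte_eth_from_rings() validates the arrays (line 411), and do_eth_dev_ring_create() wraps each ring and publishes it in data->tx_queues (lines 343-354). A minimal usage sketch follows; the ring name, size, and port name are illustrative:

    #include <rte_lcore.h>
    #include <rte_ring.h>
    #include <rte_eth_ring.h>

    /* Create one rte_ring and expose it as both the RX and TX queue of a
     * new ethdev port. Error handling is abbreviated. */
    static int
    create_ring_backed_port(void)
    {
        struct rte_ring *r = rte_ring_create("r0", 1024, rte_socket_id(),
                                             RING_F_SP_ENQ | RING_F_SC_DEQ);
        if (r == NULL)
            return -1;

        struct rte_ring *rxq[] = { r };
        struct rte_ring *txq[] = { r };

        /* Returns the new port id, or -1 on error (e.g. a NULL tx_queues
         * array with nb_tx_queues > 0, rejected at rte_eth_ring.c:411). */
        return rte_eth_from_rings("ring_port0", rxq, 1, txq, 1,
                                  rte_socket_id());
    }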
/f-stack/dpdk/drivers/net/bnxt/
bnxt_txq.c
50 txq = bp->tx_queues[i]; in bnxt_free_tx_mbufs()
113 if (eth_dev->data->tx_queues) { in bnxt_tx_queue_setup_op()
114 txq = eth_dev->data->tx_queues[queue_idx]; in bnxt_tx_queue_setup_op()
165 eth_dev->data->tx_queues[queue_idx] = txq; in bnxt_tx_queue_setup_op()
bnxt_reps.c
124 ptxq = parent->tx_queues[qid]; in bnxt_rep_tx_burst()
713 if (!parent_bp->tx_queues) { in bnxt_rep_tx_queue_setup_op()
718 parent_txq = parent_bp->tx_queues[queue_idx]; in bnxt_rep_tx_queue_setup_op()
729 if (eth_dev->data->tx_queues) { in bnxt_rep_tx_queue_setup_op()
730 vfr_txq = eth_dev->data->tx_queues[queue_idx]; in bnxt_rep_tx_queue_setup_op()
756 eth_dev->data->tx_queues[queue_idx] = vfr_txq; in bnxt_rep_tx_queue_setup_op()
bnxt_txr.c
27 struct bnxt_tx_queue *txq = bp->tx_queues[i]; in bnxt_free_tx_rings()
41 bp->tx_queues[i] = NULL; in bnxt_free_tx_rings()
530 struct bnxt_tx_queue *txq = bp->tx_queues[tx_queue_id]; in bnxt_tx_queue_start()
547 struct bnxt_tx_queue *txq = bp->tx_queues[tx_queue_id]; in bnxt_tx_queue_stop()
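
Across bnxt (lines 113-165 above) and the atlantic, e1000, ionic, and mvneta drivers further down, tx_queue_setup follows one idiom: release whatever queue already occupies the index, allocate the new queue, then publish it in dev->data->tx_queues[] for the burst path. A generic sketch of that pattern, with hypothetical my_* names:

    #include <errno.h>
    #include <rte_ethdev.h>
    #include <rte_malloc.h>

    /* Hypothetical per-queue state; real PMDs keep rings and
     * descriptors here. */
    struct my_tx_queue {
        uint16_t nb_desc;
    };

    static void
    my_tx_queue_release(void *q)
    {
        rte_free(q);
    }

    /* Generic sketch of the eth_tx_queue_setup op shared by these drivers. */
    static int
    my_tx_queue_setup(struct rte_eth_dev *dev, uint16_t queue_idx,
                      uint16_t nb_desc, unsigned int socket_id,
                      const struct rte_eth_txconf *tx_conf __rte_unused)
    {
        struct my_tx_queue *txq;

        /* Re-setup on a live index: drop the previously installed queue,
         * as bnxt_tx_queue_setup_op() does at lines 113-114 above. */
        if (dev->data->tx_queues[queue_idx] != NULL) {
            my_tx_queue_release(dev->data->tx_queues[queue_idx]);
            dev->data->tx_queues[queue_idx] = NULL;
        }

        txq = rte_zmalloc_socket("my_txq", sizeof(*txq),
                                 RTE_CACHE_LINE_SIZE, socket_id);
        if (txq == NULL)
            return -ENOMEM;
        txq->nb_desc = nb_desc;

        /* Publish the queue; the TX burst path indexes it by queue id. */
        dev->data->tx_queues[queue_idx] = txq;
        return 0;
    }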
/f-stack/dpdk/drivers/net/nfb/
nfb_tx.c
13 struct ndp_tx_queue *txq = dev->data->tx_queues[txq_id]; in nfb_eth_tx_queue_start()
34 struct ndp_tx_queue *txq = dev->data->tx_queues[txq_id]; in nfb_eth_tx_queue_stop()
75 dev->data->tx_queues[tx_queue_id] = txq; in nfb_eth_tx_queue_setup()
nfb_stats.c
26 dev->data->tx_queues); in nfb_eth_stats_get()
65 dev->data->tx_queues); in nfb_eth_stats_reset()
/f-stack/dpdk/drivers/net/mlx4/
mlx4_txq.c
141 txq = dev->data->tx_queues[i]; in mlx4_tx_uar_init_secondary()
153 txq = dev->data->tx_queues[i]; in mlx4_tx_uar_init_secondary()
325 txq = dev->data->tx_queues[idx]; in mlx4_tx_queue_setup()
483 dev->data->tx_queues[idx] = txq; in mlx4_tx_queue_setup()
487 dev->data->tx_queues[idx] = NULL; in mlx4_tx_queue_setup()
513 if (ETH_DEV(priv)->data->tx_queues[i] == txq) { in mlx4_tx_queue_release()
516 ETH_DEV(priv)->data->tx_queues[i] = NULL; in mlx4_tx_queue_release()
/f-stack/dpdk/drivers/net/hns3/
hns3_rxtx.c
188 hw->fkq_data.tx_queues = NULL; in hns3_fake_tx_queue_release()
228 if (dev->data->tx_queues[i]) { in hns3_free_tx_queues()
230 dev->data->tx_queues[i] = NULL; in hns3_free_tx_queues()
237 if (fkq_data->tx_queues[i]) in hns3_free_tx_queues()
349 txq = hw->data->tx_queues[i]; in hns3_update_all_queues_pvid_proc_en()
462 txq = hw->data->tx_queues[i]; in hns3_start_all_txqs()
483 txq = hw->data->tx_queues[j]; in hns3_start_all_txqs()
538 txq = hw->data->tx_queues[i]; in hns3_restore_tqp_enable_state()
552 txq = hw->data->tx_queues[i]; in hns3_stop_all_txqs()
1160 txq = hw->data->tx_queues[i]; in hns3_start_tqps()
[all …]
/f-stack/dpdk/drivers/net/szedata2/
rte_eth_szedata2.c
1191 eth_tx_queue_release(dev->data->tx_queues[i]); in eth_dev_close()
1192 dev->data->tx_queues[i] = NULL; in eth_dev_close()
1309 if (dev->data->tx_queues[tx_queue_id] != NULL) { in eth_tx_queue_setup()
1311 dev->data->tx_queues[tx_queue_id] = NULL; in eth_tx_queue_setup()
1344 dev->data->tx_queues[tx_queue_id] = txq; in eth_tx_queue_setup()
1689 SZE2_DIR_TX, tx_queues * i); in get_port_info()
1693 tx_queues : 0; in get_port_info()
1697 tx_queues, tx_queues * i, numa_tx); in get_port_info()
1705 tx_queues * i, numa_tx); in get_port_info()
1726 pi[current].tx_base_id = tx_queues * i; in get_port_info()
[all …]
/f-stack/dpdk/drivers/net/kni/
rte_eth_kni.c
60 struct pmd_queue tx_queues[KNI_MAX_QUEUE_PER_PORT]; member
276 q = &internals->tx_queues[tx_queue_id]; in eth_kni_tx_queue_setup()
279 dev->data->tx_queues[tx_queue_id] = q; in eth_kni_tx_queue_setup()
318 q = data->tx_queues[i]; in eth_kni_stats_get()
346 q = data->tx_queues[i]; in eth_kni_stats_reset()
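
The kni driver keeps its own pmd_queue array and walks data->tx_queues in both stats hooks (lines 318 and 346); vhost and ark below aggregate the same way. A sketch of that per-queue aggregation, assuming a hypothetical counter layout:

    #include <rte_ethdev.h>

    /* Hypothetical per-queue TX counters. */
    struct my_queue {
        uint64_t tx_pkts;
        uint64_t tx_bytes;
        uint64_t tx_errs;
    };

    static int
    my_stats_get(struct rte_eth_dev *dev, struct rte_eth_stats *stats)
    {
        uint16_t i;

        for (i = 0; i < dev->data->nb_tx_queues; i++) {
            struct my_queue *q = dev->data->tx_queues[i];

            if (q == NULL)
                continue;
            /* Per-queue counters exist only for the first
             * RTE_ETHDEV_QUEUE_STAT_CNTRS queues. */
            if (i < RTE_ETHDEV_QUEUE_STAT_CNTRS) {
                stats->q_opackets[i] = q->tx_pkts;
                stats->q_obytes[i] = q->tx_bytes;
            }
            stats->opackets += q->tx_pkts;
            stats->obytes += q->tx_bytes;
            stats->oerrors += q->tx_errs;
        }
        return 0;
    }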
/f-stack/dpdk/drivers/net/axgbe/
axgbe_rxtx.c
557 dev->data->tx_queues[queue_idx] = txq; in axgbe_dev_tx_queue_setup()
558 if (!pdata->tx_queues) in axgbe_dev_tx_queue_setup()
559 pdata->tx_queues = dev->data->tx_queues; in axgbe_dev_tx_queue_setup()
653 txq = dev->data->tx_queues[i]; in axgbe_dev_disable_tx()
664 txq = dev->data->tx_queues[i]; in axgbe_dev_disable_tx()
676 txq = dev->data->tx_queues[i]; in axgbe_dev_enable_tx()
824 txq = dev->data->tx_queues[i]; in axgbe_dev_clear_queues()
828 dev->data->tx_queues[i] = NULL; in axgbe_dev_clear_queues()
/f-stack/dpdk/drivers/net/bnx2x/
bnx2x_rxtx.c
314 dev->data->tx_queues[queue_idx] = txq; in bnx2x_dev_tx_queue_setup()
315 if (!sc->tx_queues) sc->tx_queues = dev->data->tx_queues; in bnx2x_dev_tx_queue_setup()
496 struct bnx2x_tx_queue *txq = dev->data->tx_queues[i]; in bnx2x_dev_clear_queues()
499 dev->data->tx_queues[i] = NULL; in bnx2x_dev_clear_queues()
/f-stack/dpdk/drivers/net/vhost/
rte_eth_vhost.c
243 vq = dev->data->tx_queues[i]; in vhost_dev_xstats_reset()
307 vq = dev->data->tx_queues[i]; in vhost_dev_xstats_get()
743 vq = dev->data->tx_queues[i]; in update_queuing_status()
767 vq = eth_dev->data->tx_queues[i]; in queue_setup()
871 vq = eth_dev->data->tx_queues[i]; in destroy_device()
1197 if (dev->data->tx_queues) in eth_dev_close()
1199 rte_free(dev->data->tx_queues[i]); in eth_dev_close()
1305 if (dev->data->tx_queues[i] == NULL) in eth_stats_get()
1307 vq = dev->data->tx_queues[i]; in eth_stats_get()
1337 if (dev->data->tx_queues[i] == NULL) in eth_stats_reset()
[all …]
/f-stack/dpdk/drivers/net/atlantic/
atl_rxtx.c
249 if (dev->data->tx_queues[tx_queue_id] != NULL) { in atl_tx_queue_setup()
250 atl_tx_queue_release(dev->data->tx_queues[tx_queue_id]); in atl_tx_queue_setup()
251 dev->data->tx_queues[tx_queue_id] = NULL; in atl_tx_queue_setup()
302 dev->data->tx_queues[tx_queue_id] = txq; in atl_tx_queue_setup()
318 txq = eth_dev->data->tx_queues[i]; in atl_tx_init()
560 txq = dev->data->tx_queues[tx_queue_id]; in atl_tx_queue_stop()
599 atl_tx_queue_release(dev->data->tx_queues[i]); in atl_free_queues()
600 dev->data->tx_queues[i] = 0; in atl_free_queues()
684 txq = dev->data->tx_queues[queue_id]; in atl_txq_info_get()
/f-stack/dpdk/drivers/net/failsafe/
failsafe_rxtx.c
153 sub_txq = ETH(sdev)->data->tx_queues[txq->qid]; in failsafe_tx_burst()
173 sub_txq = ETH(sdev)->data->tx_queues[txq->qid]; in failsafe_tx_burst_fast()
failsafe_ops.c
115 txq = dev->data->tx_queues[i]; in fs_set_queues_state_start()
177 if (dev->data->tx_queues[i] != NULL) in fs_set_queues_state_stop()
558 if (ETH(sdev)->data->tx_queues != NULL && in fs_tx_queue_release()
559 ETH(sdev)->data->tx_queues[txq->qid] != NULL) { in fs_tx_queue_release()
561 (ETH(sdev)->data->tx_queues[txq->qid]); in fs_tx_queue_release()
564 dev->data->tx_queues[txq->qid] = NULL; in fs_tx_queue_release()
592 txq = dev->data->tx_queues[tx_queue_id]; in fs_tx_queue_setup()
595 dev->data->tx_queues[tx_queue_id] = NULL; in fs_tx_queue_setup()
612 dev->data->tx_queues[tx_queue_id] = txq; in fs_tx_queue_setup()
642 fs_tx_queue_release(dev->data->tx_queues[i]); in fs_dev_free_queues()
[all …]
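
In the burst path, failsafe resolves the active sub-device's queue at call time by indexing that device's dev->data->tx_queues with a stored queue id (lines 153 and 173). A stripped-down sketch of the delegation, with a hypothetical wrapper queue; it leans on the in-tree rte_eth_devices[] array and tx_pkt_burst pointer that PMDs use internally (failsafe reaches them through its ETH(sdev) accessor):

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    /* Hypothetical wrapper queue: remembers which port/queue to forward to. */
    struct my_txq {
        uint16_t sub_port; /* port id of the backing device */
        uint16_t qid;      /* queue index on that device */
    };

    static uint16_t
    my_tx_burst(void *queue, struct rte_mbuf **tx_pkts, uint16_t nb_pkts)
    {
        struct my_txq *txq = queue;
        struct rte_eth_dev *sdev = &rte_eth_devices[txq->sub_port];

        /* Look up the sub-device's own queue object and delegate the burst. */
        return sdev->tx_pkt_burst(sdev->data->tx_queues[txq->qid],
                                  tx_pkts, nb_pkts);
    }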
/f-stack/dpdk/drivers/raw/ntb/
ntb.c
265 q_conf->tx_free_thresh = hw->tx_queues[queue_id]->tx_free_thresh; in ntb_queue_conf_get()
451 hw->tx_queues[qp_id] = txq; in ntb_txq_setup()
486 ntb_txq_release(hw->tx_queues[queue_id]); in ntb_queue_release()
487 hw->tx_queues[queue_id] = NULL; in ntb_queue_release()
506 struct ntb_tx_queue *txq = hw->tx_queues[qp_id]; in ntb_queue_init()
606 struct ntb_tx_queue *txq = hw->tx_queues[(size_t)context]; in ntb_enqueue_bufs()
876 hw->tx_queues = rte_zmalloc("ntb_tx_queues", in ntb_dev_configure()
889 rte_free(hw->tx_queues); in ntb_dev_configure()
891 hw->tx_queues = NULL; in ntb_dev_configure()
967 ntb_txq_release_mbufs(hw->tx_queues[i]); in ntb_dev_start()
[all …]
/f-stack/dpdk/drivers/net/ark/
ark_ethdev_tx.c
247 dev->data->tx_queues[queue_idx] = queue; in eth_ark_tx_queue_setup()
357 queue = dev->data->tx_queues[queue_id]; in eth_ark_tx_queue_stop()
379 queue = dev->data->tx_queues[queue_id]; in eth_ark_tx_queue_start()
ark_ethdev.c
521 ark->tx_queues = num_q; in ark_config_device()
693 for (i = 0; i < ark->tx_queues; i++) { in eth_ark_dev_stop()
755 eth_ark_tx_queue_release(dev->data->tx_queues[i]); in eth_ark_dev_close()
756 dev->data->tx_queues[i] = 0; in eth_ark_dev_close()
858 eth_tx_queue_stats_get(dev->data->tx_queues[i], stats); in eth_ark_dev_stats_get()
874 eth_tx_queue_stats_reset(dev->data->tx_queues[i]); in eth_ark_dev_stats_reset()
/f-stack/dpdk/drivers/net/ionic/
ionic_rxtx.c
62 struct ionic_qcq *txq = dev->data->tx_queues[queue_id]; in ionic_txq_info_get()
138 txq = eth_dev->data->tx_queues[tx_queue_id]; in ionic_dev_tx_queue_stop()
186 if (eth_dev->data->tx_queues[tx_queue_id] != NULL) { in ionic_dev_tx_queue_setup()
187 void *tx_queue = eth_dev->data->tx_queues[tx_queue_id]; in ionic_dev_tx_queue_setup()
189 eth_dev->data->tx_queues[tx_queue_id] = NULL; in ionic_dev_tx_queue_setup()
203 eth_dev->data->tx_queues[tx_queue_id] = txq; in ionic_dev_tx_queue_setup()
219 txq = eth_dev->data->tx_queues[tx_queue_id]; in ionic_dev_tx_queue_start()
/f-stack/dpdk/drivers/net/e1000/
em_rxtx.c
1268 if (dev->data->tx_queues[queue_idx] != NULL) { in eth_em_tx_queue_setup()
1269 em_tx_queue_release(dev->data->tx_queues[queue_idx]); in eth_em_tx_queue_setup()
1270 dev->data->tx_queues[queue_idx] = NULL; in eth_em_tx_queue_setup()
1315 dev->data->tx_queues[queue_idx] = txq; in eth_em_tx_queue_setup()
1590 txq = dev->data->tx_queues[i]; in em_dev_clear_queues()
1619 eth_em_tx_queue_release(dev->data->tx_queues[i]); in em_dev_free_queues()
1620 dev->data->tx_queues[i] = NULL; in em_dev_free_queues()
1942 txq = dev->data->tx_queues[i]; in eth_em_tx_init()
2016 txq = dev->data->tx_queues[queue_id]; in em_txq_info_get()
2039 if (dev->data->tx_queues == NULL) in e1000_flush_tx_ring()
[all …]
/f-stack/dpdk/drivers/net/af_xdp/
rte_eth_af_xdp.c
135 struct pkt_tx_queue *tx_queues; member
808 memset(&internals->tx_queues[i].stats, 0, in eth_stats_reset()
1234 txq = &internals->tx_queues[tx_queue_id];
1236 dev->data->tx_queues[tx_queue_id] = txq;
1540 internals->tx_queues = rte_zmalloc_socket(NULL,
1543 if (internals->tx_queues == NULL) {
1548 internals->tx_queues[i].pair = &internals->rx_queues[i];
1549 internals->rx_queues[i].pair = &internals->tx_queues[i];
1551 internals->tx_queues[i].xsk_queue_idx = start_queue_idx + i;
1578 rte_free(internals->tx_queues);
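
At probe time, af_xdp allocates its private tx_queues array with rte_zmalloc_socket() and cross-links each TX queue with its RX partner (lines 1540-1551). A condensed sketch of that allocation and pairing, with hypothetical structs:

    #include <errno.h>
    #include <rte_malloc.h>

    struct my_rxq; /* forward declaration for the cross-link */

    struct my_txq {
        struct my_rxq *pair;
        int xsk_queue_idx;
    };

    struct my_rxq {
        struct my_txq *pair;
    };

    /* Allocate both queue arrays on the given NUMA node and pair them up,
     * mirroring rte_eth_af_xdp.c lines 1540-1551. */
    static int
    my_alloc_queue_pairs(struct my_rxq **rxq_out, struct my_txq **txq_out,
                         int n, int start_queue_idx, int socket_id)
    {
        struct my_rxq *rxq = rte_zmalloc_socket(NULL, n * sizeof(*rxq),
                                                0, socket_id);
        struct my_txq *txq = rte_zmalloc_socket(NULL, n * sizeof(*txq),
                                                0, socket_id);
        int i;

        if (rxq == NULL || txq == NULL) {
            rte_free(rxq); /* rte_free(NULL) is a no-op */
            rte_free(txq);
            return -ENOMEM;
        }
        for (i = 0; i < n; i++) {
            txq[i].pair = &rxq[i];
            rxq[i].pair = &txq[i];
            txq[i].xsk_queue_idx = start_queue_idx + i;
        }
        *rxq_out = rxq;
        *txq_out = txq;
        return 0;
    }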
/f-stack/dpdk/drivers/net/mvneta/
mvneta_rxtx.c
775 if (dev->data->tx_queues[idx]) { in mvneta_tx_queue_setup()
776 rte_free(dev->data->tx_queues[idx]); in mvneta_tx_queue_setup()
777 dev->data->tx_queues[idx] = NULL; in mvneta_tx_queue_setup()
788 dev->data->tx_queues[idx] = txq; in mvneta_tx_queue_setup()
943 struct mvneta_txq *txq = dev->data->tx_queues[i]; in mvneta_flush_queues()
/f-stack/dpdk/drivers/event/octeontx2/
otx2_evdev_adptr.c
518 txq = eth_dev->data->tx_queues[i]; in otx2_sso_tx_adapter_queue_add()
528 txq = eth_dev->data->tx_queues[tx_queue_id]; in otx2_sso_tx_adapter_queue_add()
555 txq = eth_dev->data->tx_queues[i]; in otx2_sso_tx_adapter_queue_del()
560 txq = eth_dev->data->tx_queues[tx_queue_id]; in otx2_sso_tx_adapter_queue_del()
