
Searched refs:txq (Results 1 – 25 of 251) sorted by relevance


/f-stack/dpdk/drivers/net/bnxt/
bnxt_txq.c
21 if (txq && txq->cp_ring && txq->cp_ring->hw_stats) in bnxt_free_txq_stats()
30 if (!txq || !txq->tx_ring) in bnxt_tx_queue_release_mbufs()
59 if (txq) { in bnxt_tx_queue_release_op()
80 txq->mz = NULL; in bnxt_tx_queue_release_op()
83 rte_free(txq); in bnxt_tx_queue_release_op()
115 if (txq) { in bnxt_tx_queue_setup_op()
117 txq = NULL; in bnxt_tx_queue_setup_op()
122 if (!txq) { in bnxt_tx_queue_setup_op()
130 if (!txq->free) { in bnxt_tx_queue_setup_op()
135 txq->bp = bp; in bnxt_tx_queue_setup_op()
[all …]
bnxt_txr.c
29 if (!txq) in bnxt_free_tx_rings()
34 rte_free(txq->tx_ring); in bnxt_free_tx_rings()
38 rte_free(txq->cp_ring); in bnxt_free_tx_rings()
40 rte_free(txq); in bnxt_free_tx_rings()
67 txq->tx_ring = txr; in bnxt_init_tx_ring_struct()
88 txq->cp_ring = cpr; in bnxt_init_tx_ring_struct()
138 (txq->bp->tx_cfa_action || txq->vfr_tx_cfa_action))) in bnxt_start_xmit()
440 if (bnxt_tx_bds_in_hw(txq) < txq->tx_free_thresh) in bnxt_handle_tx_cp()
486 bnxt_handle_tx_cp(txq); in bnxt_xmit_pkts()
507 bnxt_db_write(&txq->tx_ring->tx_db, txq->tx_ring->tx_prod); in bnxt_xmit_pkts()
[all …]
/f-stack/dpdk/drivers/net/sfc/
sfc_ef10_tx.c
98 *tx_ev = evq_hw_ring[txq->evq_read_ptr & txq->ptr_mask]; in sfc_ef10_tx_get_event()
203 EFX_POPULATE_QWORD_5(txq->txq_hw_ring[added & txq->ptr_mask], in sfc_ef10_tx_qdesc_tso2_create()
309 sfc_ef10_tx_qpush(txq, added, txq->added); in sfc_ef10_try_reap()
505 &txq->txq_hw_ring[(*added) & txq->ptr_mask]); in sfc_ef10_xmit_tso_pkt()
661 sfc_ef10_tx_qpush(txq, added, txq->added); in sfc_ef10_xmit_pkts()
713 sfc_ef10_tx_qpush(txq, added, txq->added); in sfc_ef10_xmit_pkts()
863 sfc_ef10_tx_qpush(txq, added, txq->added); in sfc_ef10_simple_xmit_pkts()
926 txq = rte_zmalloc_socket("sfc-ef10-txq", sizeof(*txq), in sfc_ef10_tx_qcreate()
952 txq->tsoh_iova = rte_malloc_virt2iova(txq->tsoh); in sfc_ef10_tx_qcreate()
1001 txq->added = txq->completed = txq_desc_index; in sfc_ef10_tx_qstart()
[all …]
sfc_ef100_tx.c
223 *ev = evq_hw_ring[txq->evq_read_ptr & txq->ptr_mask]; in sfc_ef100_tx_get_event()
226 (txq->evq_read_ptr >> txq->evq_phase_bit_shift) & 1)) in sfc_ef100_tx_get_event()
238 txq->evq_read_ptr, txq->evq_read_ptr & txq->ptr_mask, in sfc_ef100_tx_get_event()
248 txq->evq_read_ptr & txq->ptr_mask); in sfc_ef100_tx_get_event()
279 txd = &txq->sw_ring[completed & txq->ptr_mask]; in sfc_ef100_tx_reap_num_descs()
778 txq = rte_zmalloc_socket("sfc-ef100-txq", sizeof(*txq), in sfc_ef100_tx_qcreate()
817 rte_free(txq); in sfc_ef100_tx_qcreate()
831 rte_free(txq); in sfc_ef100_tx_qdestroy()
842 txq->added = txq->completed = txq_desc_index; in sfc_ef100_tx_qstart()
884 txd = &txq->sw_ring[completed & txq->ptr_mask]; in sfc_ef100_tx_qreap()
[all …]
sfc_tx.c
165 txq->evq = evq; in sfc_tx_qinit()
458 evq = txq->evq; in sfc_tx_qstart()
483 rc = efx_tx_qcreate(sa->nic, txq->hw_index, 0, &txq->mem, in sfc_tx_qstart()
675 txd = &txq->sw_ring[completed & txq->ptr_mask]; in sfc_efx_tx_reap()
902 txq->sw_ring[(added - 1) & txq->ptr_mask].mbuf = *pktp; in sfc_efx_xmit_pkts()
906 rc = efx_tx_qdesc_post(txq->common, txq->pend_desc, in sfc_efx_xmit_pkts()
908 txq->completed, &txq->added); in sfc_efx_xmit_pkts()
912 efx_tx_qpush(txq->common, txq->added, pushed); in sfc_efx_xmit_pkts()
998 txq = rte_zmalloc_socket("sfc-efx-txq", sizeof(*txq), in sfc_efx_tx_qcreate()
1075 txq->pending = txq->completed = txq->added = txq_desc_index; in sfc_efx_tx_qstart()
[all …]
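
The sfc hits above (sfc_ef10_tx.c, sfc_ef100_tx.c, sfc_tx.c) all index the descriptor and event rings the same way: free-running counters such as added, completed and evq_read_ptr are masked with ptr_mask, as in txq->sw_ring[completed & txq->ptr_mask] or txq->added = txq->completed = txq_desc_index. The standalone sketch below models only that power-of-two ring arithmetic; the names ring, ring_used and ring_post are illustrative and do not come from the driver.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified model of the sfc-style TX ring: 'added' and 'completed' are
 * free-running 16-bit counters; only (counter & ptr_mask) touches the ring.
 * ring_size must be a power of two so the mask wraps indices correctly. */
struct ring {
	unsigned int ptr_mask;   /* ring_size - 1 */
	uint16_t added;          /* descriptors handed to hardware */
	uint16_t completed;      /* descriptors reaped back */
};

static unsigned int ring_used(const struct ring *r)
{
	/* Unsigned subtraction stays correct across counter wrap-around. */
	return (uint16_t)(r->added - r->completed);
}

static unsigned int ring_post(struct ring *r)
{
	/* Slot for the next descriptor, as in txq_hw_ring[added & ptr_mask]. */
	return r->added++ & r->ptr_mask;
}

int main(void)
{
	struct ring r = { .ptr_mask = 512 - 1, .added = 0xfffe, .completed = 0xfffe };

	for (int i = 0; i < 4; i++)
		printf("post -> slot %u\n", ring_post(&r));
	printf("in flight: %u\n", ring_used(&r));   /* 4, despite the 16-bit wrap */
	assert(ring_used(&r) <= r.ptr_mask + 1);
	return 0;
}
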
/f-stack/dpdk/drivers/net/mlx4/
mlx4_txq.c
48 txq_uar_init(struct txq *txq) in txq_uar_init() argument
55 ppriv->uar_table[txq->stats.idx] = txq->msq.db; in txq_uar_init()
74 txq_uar_init_secondary(struct txq *txq, int fd) in txq_uar_init_secondary() argument
94 txq->port_id, txq->stats.idx); in txq_uar_init_secondary()
110 txq_uar_uninit_secondary(struct txq *txq) in txq_uar_uninit_secondary() argument
135 struct txq *txq; in mlx4_tx_uar_init_secondary() local
179 mlx4_txq_free_elts(struct txq *txq) in mlx4_txq_free_elts() argument
195 txq->elts_tail = txq->elts_head; in mlx4_txq_free_elts()
294 struct txq *txq; in mlx4_tx_queue_setup() local
350 *txq = (struct txq){ in mlx4_tx_queue_setup()
[all …]
/f-stack/dpdk/drivers/net/axgbe/
axgbe_rxtx_vec_sse.c
42 idx = AXGBE_GET_DESC_IDX(txq, txq->dirty + txq->free_batch_cnt in axgbe_xmit_cleanup_vec()
44 desc = &txq->desc[idx]; in axgbe_xmit_cleanup_vec()
54 txq->dirty += txq->free_batch_cnt; in axgbe_xmit_cleanup_vec()
55 txq->nb_desc_free += txq->free_batch_cnt; in axgbe_xmit_cleanup_vec()
64 struct axgbe_tx_queue *txq; in axgbe_xmit_pkts_vec() local
69 if (txq->nb_desc_free < txq->free_thresh) { in axgbe_xmit_pkts_vec()
70 axgbe_xmit_cleanup_vec(txq); in axgbe_xmit_pkts_vec()
76 idx = AXGBE_GET_DESC_IDX(txq, txq->cur); in axgbe_xmit_pkts_vec()
77 loop = txq->nb_desc - idx; in axgbe_xmit_pkts_vec()
90 txq->cur += nb_pkts; in axgbe_xmit_pkts_vec()
[all …]
axgbe_rxtx.c
518 if (txq->free_thresh > txq->nb_desc) in axgbe_dev_tx_queue_setup()
519 txq->free_thresh = (txq->nb_desc >> 1); in axgbe_dev_tx_queue_setup()
520 txq->free_batch_cnt = txq->free_thresh; in axgbe_dev_tx_queue_setup()
523 if (txq->nb_desc % txq->free_thresh != 0) in axgbe_dev_tx_queue_setup()
548 txq->nb_desc_free = txq->nb_desc; in axgbe_dev_tx_queue_setup()
695 while (txq->cur != txq->dirty) { in axgbe_xmit_cleanup()
721 idx = AXGBE_GET_DESC_IDX(txq, txq->cur); in axgbe_xmit_hw()
780 nb_desc_free = txq->nb_desc - (txq->cur - txq->dirty); in axgbe_xmit_pkts()
784 nb_desc_free = txq->nb_desc - (txq->cur - txq->dirty); in axgbe_xmit_pkts()
798 idx = AXGBE_GET_DESC_IDX(txq, txq->cur); in axgbe_xmit_pkts()
[all …]
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_rxtx_vec_common.h
86 status = txq->tx_ring[txq->tx_next_dd].wb.status; in ixgbe_tx_free_bufs()
96 txep = &txq->sw_ring_v[txq->tx_next_dd - (n - 1)]; in ixgbe_tx_free_bufs()
124 txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh); in ixgbe_tx_free_bufs()
125 txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh); in ixgbe_tx_free_bufs()
126 if (txq->tx_next_dd >= txq->nb_tx_desc) in ixgbe_tx_free_bufs()
127 txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1); in ixgbe_tx_free_bufs()
153 for (i = txq->tx_next_dd - (txq->tx_rs_thresh - 1); in _ixgbe_tx_queue_release_mbufs_vec()
229 txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1); in _ixgbe_reset_tx_queue_vec()
230 txq->tx_next_rs = (uint16_t)(txq->tx_rs_thresh - 1); in _ixgbe_reset_tx_queue_vec()
239 txq->nb_tx_free = (uint16_t)(txq->nb_tx_desc - 1); in _ixgbe_reset_tx_queue_vec()
[all …]
ixgbe_rxtx.c
119 txep = &(txq->sw_ring[txq->tx_next_dd - (txq->tx_rs_thresh - 1)]); in ixgbe_tx_free_bufs()
143 txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh); in ixgbe_tx_free_bufs()
144 txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh); in ixgbe_tx_free_bufs()
590 txq->port_id, txq->queue_id); in ixgbe_xmit_cleanup()
607 txq->port_id, txq->queue_id); in ixgbe_xmit_cleanup()
2382 if (txq->nb_tx_desc - txq->nb_tx_free < txq->tx_rs_thresh) in ixgbe_tx_done_cleanup_simple()
2436 txq->ops->free_swring(txq); in ixgbe_tx_queue_release()
2760 txq->sw_ring, txq->tx_ring, txq->tx_ring_phys_addr); in ixgbe_dev_tx_queue_setup()
2765 txq->ops->reset(txq); in ixgbe_dev_tx_queue_setup()
3345 txq->ops->reset(txq); in ixgbe_dev_clear_queues()
[all …]
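
The ixgbe free-bufs hits above, and the near-identical ice, iavf, i40e and txgbe ones further down, share one cleanup pattern: once tx_rs_thresh descriptors have completed, a whole batch of mbufs is released, nb_tx_free grows by the threshold, and tx_next_dd advances by the threshold, wrapping back to tx_rs_thresh - 1 at the end of the ring. The sketch below models only that index arithmetic; the 512-descriptor ring and 32-descriptor threshold are illustrative values, and no DD-bit polling or mbuf handling is included.

#include <stdint.h>
#include <stdio.h>

/* Index bookkeeping only: no descriptors, no mbufs, no DD-bit check. */
struct txq_idx {
	uint16_t nb_tx_desc;   /* ring size */
	uint16_t tx_rs_thresh; /* batch size for cleanup */
	uint16_t tx_next_dd;   /* descriptor whose DD bit would be polled next */
	uint16_t nb_tx_free;   /* free descriptors available to the caller */
};

/* Mirrors the arithmetic in ixgbe_tx_free_bufs(): assume the descriptor at
 * tx_next_dd is done, so one full batch of tx_rs_thresh can be reclaimed. */
static void tx_free_batch(struct txq_idx *q)
{
	q->nb_tx_free = (uint16_t)(q->nb_tx_free + q->tx_rs_thresh);
	q->tx_next_dd = (uint16_t)(q->tx_next_dd + q->tx_rs_thresh);
	if (q->tx_next_dd >= q->nb_tx_desc)
		q->tx_next_dd = (uint16_t)(q->tx_rs_thresh - 1);
}

int main(void)
{
	struct txq_idx q = {
		.nb_tx_desc = 512, .tx_rs_thresh = 32,
		.tx_next_dd = 32 - 1, .nb_tx_free = 0,
	};

	/* After nb_tx_desc / tx_rs_thresh batches, tx_next_dd is back where it
	 * started and every descriptor has been reclaimed exactly once. */
	for (int i = 0; i < 512 / 32; i++)
		tx_free_batch(&q);
	printf("tx_next_dd=%u nb_tx_free=%u\n",
	       (unsigned)q.tx_next_dd, (unsigned)q.nb_tx_free);
	return 0;
}
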
/f-stack/dpdk/drivers/net/nfb/
nfb_tx.c
16 if (txq->queue == NULL) { in nfb_eth_tx_queue_start()
37 if (txq->queue == NULL) { in nfb_eth_tx_queue_stop()
64 if (txq == NULL) { in nfb_eth_tx_queue_setup()
72 txq); in nfb_eth_tx_queue_setup()
77 rte_free(txq); in nfb_eth_tx_queue_setup()
94 txq->nfb = nfb; in nfb_eth_tx_queue_init()
97 txq->tx_pkts = 0; in nfb_eth_tx_queue_init()
98 txq->tx_bytes = 0; in nfb_eth_tx_queue_init()
99 txq->err_pkts = 0; in nfb_eth_tx_queue_init()
110 rte_free(txq); in nfb_eth_tx_queue_release()
[all …]
/f-stack/dpdk/drivers/net/mlx5/
mlx5_txq.c
138 cqe = &txq->cqes[txq->cq_ci & txq->cqe_m]; in txq_sync_cq()
139 ret = check_cqe(cqe, txq->cqe_s, txq->cq_ci); in txq_sync_cq()
156 *txq->cq_db = rte_cpu_to_be_32(txq->cq_ci); in txq_sync_cq()
157 txq->cq_pi = txq->cq_ci; in txq_sync_cq()
179 container_of(txq, struct mlx5_txq_ctrl, txq); in mlx5_tx_queue_stop_primary()
193 txq->wqe_ci = txq->wqe_s; in mlx5_tx_queue_stop_primary()
252 container_of(txq, struct mlx5_txq_ctrl, txq); in mlx5_tx_queue_start_primary()
377 container_of(txq, struct mlx5_txq_ctrl, txq); in mlx5_tx_queue_setup()
418 container_of(txq, struct mlx5_txq_ctrl, txq); in mlx5_tx_hairpin_queue_setup()
570 struct mlx5_txq_data *txq = &txq_ctrl->txq; in txq_uar_init_secondary() local
[all …]
mlx5_rxtx.c
2042 tail = txq->fcqs[(txq->cq_ci - 1) & txq->cqe_m]; in mlx5_tx_comp_flush()
2078 cqe = &txq->cqes[txq->cq_ci & txq->cqe_m]; in mlx5_tx_handle_completion()
2180 txq->fcqs[txq->cq_pi++ & txq->cqe_m] = head | in mlx5_tx_request_completion()
2183 txq->fcqs[txq->cq_pi++ & txq->cqe_m] = head; in mlx5_tx_request_completion()
3165 wqe = txq->wqes + (txq->wqe_ci & txq->wqe_m); in mlx5_tx_schedule_send()
3263 wqe = txq->wqes + (txq->wqe_ci & txq->wqe_m); in mlx5_tx_packet_multi_tso()
3347 wqe = txq->wqes + (txq->wqe_ci & txq->wqe_m); in mlx5_tx_packet_multi_send()
3550 wqe = txq->wqes + (txq->wqe_ci & txq->wqe_m); in mlx5_tx_packet_multi_inline()
3757 wqe = txq->wqes + (txq->wqe_ci & txq->wqe_m); in mlx5_tx_burst_tso()
4712 txq->elts[txq->elts_head++ & txq->elts_m] = in mlx5_tx_burst_single_send()
[all …]
/f-stack/dpdk/drivers/net/ice/
ice_rxtx_vec_common.h
85 if ((txq->tx_ring[txq->tx_next_dd].cmd_type_offset_bsz & in ice_tx_free_bufs()
90 n = txq->tx_rs_thresh; in ice_tx_free_bufs()
95 txep = &txq->sw_ring[txq->tx_next_dd - (n - 1)]; in ice_tx_free_bufs()
124 txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh); in ice_tx_free_bufs()
125 txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh); in ice_tx_free_bufs()
126 if (txq->tx_next_dd >= txq->nb_tx_desc) in ice_tx_free_bufs()
127 txq->tx_next_dd = (uint16_t)(txq->tx_rs_thresh - 1); in ice_tx_free_bufs()
182 if (unlikely(!txq || !txq->sw_ring)) { in _ice_tx_queue_release_mbufs_vec()
191 i = txq->tx_next_dd - txq->tx_rs_thresh + 1; in _ice_tx_queue_release_mbufs_vec()
199 if (txq->tx_tail < i) { in _ice_tx_queue_release_mbufs_vec()
[all …]
ice_rxtx.c
630 if (!txq || !txq->q_set) { in ice_tx_queue_start()
809 txq = pf->fdir.txq; in ice_fdir_tx_queue_start()
810 if (!txq || !txq->q_set) { in ice_fdir_tx_queue_start()
867 if (!txq || !txq->sw_ring) { in _ice_tx_queue_release_mbufs()
954 txq->tx_rel_mbufs(txq); in ice_tx_queue_stop()
994 txq = pf->fdir.txq; in ice_fdir_tx_queue_stop()
1013 txq->tx_rel_mbufs(txq); in ice_fdir_tx_queue_stop()
2107 pf->fdir.txq = txq; in ice_fdir_setup_tx_resources()
2408 txq->port_id, txq->queue_id, in ice_xmit_cleanup()
2838 if (txq->nb_tx_desc - txq->nb_tx_free < txq->tx_rs_thresh) in ice_tx_done_cleanup_simple()
[all …]
/f-stack/dpdk/drivers/net/iavf/
iavf_rxtx_vec_common.h
88 if ((txq->tx_ring[txq->next_dd].cmd_type_offset_bsz & in iavf_tx_free_bufs()
93 n = txq->rs_thresh; in iavf_tx_free_bufs()
98 txep = &txq->sw_ring[txq->next_dd - (n - 1)]; in iavf_tx_free_bufs()
127 txq->nb_free = (uint16_t)(txq->nb_free + txq->rs_thresh); in iavf_tx_free_bufs()
128 txq->next_dd = (uint16_t)(txq->next_dd + txq->rs_thresh); in iavf_tx_free_bufs()
129 if (txq->next_dd >= txq->nb_tx_desc) in iavf_tx_free_bufs()
130 txq->next_dd = (uint16_t)(txq->rs_thresh - 1); in iavf_tx_free_bufs()
132 return txq->rs_thresh; in iavf_tx_free_bufs()
181 if (!txq->sw_ring || txq->nb_free == max_desc) in _iavf_tx_queue_release_mbufs_vec()
184 i = txq->next_dd - txq->rs_thresh + 1; in _iavf_tx_queue_release_mbufs_vec()
[all …]
/f-stack/dpdk/drivers/net/hns3/
hns3_rxtx_vec.h
12 hns3_tx_bulk_free_buffers(struct hns3_tx_queue *txq) in hns3_tx_bulk_free_buffers() argument
14 struct rte_mbuf **free = txq->free; in hns3_tx_bulk_free_buffers()
20 tx_entry = &txq->sw_ring[txq->next_to_clean]; in hns3_tx_bulk_free_buffers()
21 for (i = 0; i < txq->tx_rs_thresh; i++, tx_entry++) { in hns3_tx_bulk_free_buffers()
40 txq->tx_bd_ready += txq->tx_rs_thresh; in hns3_tx_bulk_free_buffers()
41 txq->next_to_clean += txq->tx_rs_thresh; in hns3_tx_bulk_free_buffers()
42 if (txq->next_to_clean >= txq->nb_tx_desc) in hns3_tx_bulk_free_buffers()
43 txq->next_to_clean = 0; in hns3_tx_bulk_free_buffers()
47 hns3_tx_free_buffers(struct hns3_tx_queue *txq) in hns3_tx_free_buffers() argument
56 tx_desc = &txq->tx_ring[txq->next_to_clean]; in hns3_tx_free_buffers()
[all …]
hns3_rxtx.c
96 if (txq) { in hns3_tx_queue_release()
98 if (txq->mz) in hns3_tx_queue_release()
463 if (!txq) { in hns3_start_all_txqs()
540 hns3_enable_txq(txq, txq->enabled); in hns3_restore_tqp_enable_state()
553 if (!txq) in hns3_stop_all_txqs()
1012 txq->tx_bd_ready = txq->nb_tx_desc - 1; in hns3_init_txq()
1476 txq = rte_realloc(txq, sizeof(txq[0]) * nb_queues, in hns3_fake_tx_queue_config()
2590 txq->tx_bd_ready = txq->nb_tx_desc - 1; in hns3_tx_queue_setup()
3428 tx_entry = &txq->sw_ring[txq->next_to_clean]; in hns3_tx_free_buffer_simple()
3438 txq->tx_bd_ready += txq->tx_rs_thresh; in hns3_tx_free_buffer_simple()
[all …]
/f-stack/dpdk/drivers/net/bnx2x/
bnx2x_rxtx.c
200 txq = p_txq; in bnx2x_xmit_pkts()
201 sc = txq->sc; in bnx2x_xmit_pkts()
204 if ((unlikely((txq->nb_tx_desc - txq->nb_tx_avail) > in bnx2x_xmit_pkts()
225 if ((txq->nb_tx_desc - txq->nb_tx_avail) > in bnx2x_xmit_pkts()
251 if (txq == NULL) in bnx2x_dev_tx_queue_setup()
253 txq->sc = sc; in bnx2x_dev_tx_queue_setup()
259 txq->nb_tx_desc = TOTAL_TX_BD(txq); in bnx2x_dev_tx_queue_setup()
264 txq->tx_free_thresh = min(txq->tx_free_thresh, in bnx2x_dev_tx_queue_setup()
271 (unsigned long)TOTAL_TX_BD(txq), txq->nb_tx_pages); in bnx2x_dev_tx_queue_setup()
313 txq->nb_tx_avail = txq->nb_tx_desc; in bnx2x_dev_tx_queue_setup()
[all …]
/f-stack/dpdk/drivers/net/e1000/
em_rxtx.c
317 txq->port_id, txq->queue_id); in em_xmit_cleanup()
396 if (txq->nb_tx_free < txq->tx_free_thresh) in eth_em_xmit_pkts()
464 txq->port_id, txq->queue_id); in eth_em_xmit_pkts()
577 txq->nb_tx_used = (uint16_t)(txq->nb_tx_used + nb_used); in eth_em_xmit_pkts()
581 if (txq->nb_tx_used >= txq->tx_rs_thresh) { in eth_em_xmit_pkts()
585 tx_last, txq->port_id, txq->queue_id); in eth_em_xmit_pkts()
601 (unsigned) txq->port_id, (unsigned) txq->queue_id, in eth_em_xmit_pkts()
1285 if ((txq = rte_zmalloc("ethdev TX queue", sizeof(*txq), in eth_em_tx_queue_setup()
1311 txq->sw_ring, txq->tx_ring, txq->tx_ring_phys_addr); in eth_em_tx_queue_setup()
2049 tx_desc = &txq->tx_ring[txq->tx_tail]; in e1000_flush_tx_ring()
[all …]
/f-stack/dpdk/drivers/net/atlantic/
atl_rxtx.c
201 if (!txq) { in atl_reset_tx_queue()
221 txq->tx_free = txq->nb_tx_desc - 1; in atl_reset_tx_queue()
255 txq = rte_zmalloc_socket("atlantic Tx queue", sizeof(*txq), in atl_tx_queue_setup()
276 txq->port_id, txq->queue_id); in atl_tx_queue_setup()
277 rte_free(txq); in atl_tx_queue_setup()
292 txq->port_id, txq->queue_id); in atl_tx_queue_setup()
294 rte_free(txq); in atl_tx_queue_setup()
329 txq->port_id, txq->queue_id); in atl_tx_init()
1324 txq->port_id, txq->queue_id, nb_pkts, txq->tx_free, in atl_xmit_pkts()
1325 txq->tx_tail, txq->tx_head); in atl_xmit_pkts()
[all …]
/f-stack/dpdk/drivers/net/netvsc/
hn_rxtx.c
231 txq->agg_szleft = txq->agg_szmax; in hn_reset_txagg()
232 txq->agg_pktleft = txq->agg_pktmax; in hn_reset_txagg()
268 if (!txq) in hn_dev_tx_queue_setup()
290 txq->tx_rndis = txq->tx_rndis_mz->addr; in hn_dev_tx_queue_setup()
291 txq->tx_rndis_iova = txq->tx_rndis_mz->iova; in hn_dev_tx_queue_setup()
365 if (!txq) in hn_dev_tx_queue_release()
387 hn_process_events(txq->hv, txq->queue_id, 0); in hn_dev_tx_descriptor_status()
1203 txq->port_id, txq->queue_id); in hn_flush_txagg()
1209 txq->port_id, txq->queue_id, ret); in hn_flush_txagg()
1273 txq->agg_pktleft = txq->agg_pktmax - 1; in hn_try_txagg()
[all …]
/f-stack/dpdk/drivers/net/i40e/
i40e_rxtx.c
352 txq->port_id, txq->queue_id); in i40e_xmit_cleanup()
1270 txep = &(txq->sw_ring[txq->tx_next_dd - (txq->tx_rs_thresh - 1)]); in i40e_tx_free_bufs()
1287 txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_rs_thresh); in i40e_tx_free_bufs()
1288 txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_rs_thresh); in i40e_tx_free_bufs()
1390 if (txq->tx_tail > txq->tx_next_rs) { in tx_xmit_pkts()
1400 if (txq->tx_tail >= txq->nb_tx_desc) in tx_xmit_pkts()
1649 if (!txq || !txq->q_set) { in i40e_dev_tx_queue_start()
1682 if (!txq || !txq->q_set) { in i40e_dev_tx_queue_stop()
2496 if (!txq || !txq->sw_ring) { in i40e_tx_queue_release_mbufs()
2602 if (txq->nb_tx_desc - txq->nb_tx_free < txq->tx_rs_thresh) in i40e_tx_done_cleanup_simple()
[all …]
/f-stack/dpdk/drivers/net/txgbe/
txgbe_rxtx.c
114 txep = &txq->sw_ring[txq->tx_next_dd - (txq->tx_free_thresh - 1)]; in txgbe_tx_free_bufs()
137 txq->nb_tx_free = (uint16_t)(txq->nb_tx_free + txq->tx_free_thresh); in txgbe_tx_free_bufs()
138 txq->tx_next_dd = (uint16_t)(txq->tx_next_dd + txq->tx_free_thresh); in txgbe_tx_free_bufs()
607 txq->port_id, txq->queue_id); in txgbe_xmit_cleanup()
627 txq->port_id, txq->queue_id); in txgbe_xmit_cleanup()
2048 txq->ops->release_mbufs(txq); in txgbe_tx_queue_release()
2049 txq->ops->free_swring(txq); in txgbe_tx_queue_release()
2291 txq->sw_ring, txq->tx_ring, txq->tx_ring_phys_addr); in txgbe_dev_tx_queue_setup()
2296 txq->ops->reset(txq); in txgbe_dev_tx_queue_setup()
2707 txq->ops->reset(txq); in txgbe_dev_clear_queues()
[all …]
/f-stack/dpdk/drivers/net/hinic/
hinic_pmd_tx.c
175 #define HINIC_GET_SQ_WQE_MASK(txq) ((txq)->wq->mask) argument
178 ((be16_to_cpu(*(txq)->cons_idx_addr)) & HINIC_GET_SQ_WQE_MASK(txq))
181 (((txq)->wq->cons_idx) & HINIC_GET_SQ_WQE_MASK(txq))
189 #define HINIC_GET_SQ_FREE_WQEBBS(txq) ((txq)->wq->delta - 1) argument
191 #define HINIC_IS_SQ_EMPTY(txq) (((txq)->wq->delta) == ((txq)->q_depth)) argument
273 memcpy(stats, &txq->txq_stats, sizeof(txq->txq_stats)); in hinic_txq_get_stats()
280 if (txq == NULL) in hinic_txq_stats_reset()
1139 if (HINIC_GET_SQ_FREE_WQEBBS(txq) < txq->tx_free_thresh) in hinic_xmit_pkts()
1176 hinic_return_sq_wqe(txq->nic_dev->hwdev, txq->q_id, in hinic_xmit_pkts()
1206 hinic_sq_write_db(txq->sq, txq->cos); in hinic_xmit_pkts()
[all …]
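
The hinic hits are mostly macros over the send-queue work-queue counters: consumer indices are reduced with wq->mask, HINIC_GET_SQ_FREE_WQEBBS is delta - 1 (one WQEBB held in reserve), and HINIC_IS_SQ_EMPTY tests delta == q_depth. The toy model below assumes, based only on those macros rather than on the driver's wq implementation, that delta simply counts unused WQEBBs; all function names are illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Toy model of the hinic SQ work-queue counters shown in the macros above.
 * Assumption: delta counts unused WQEBBs; q_depth is a power of two. */
struct sq_wq {
	uint16_t q_depth;  /* total WQEBBs */
	uint16_t mask;     /* q_depth - 1 */
	uint16_t prod_idx; /* where the next WQE is written */
	uint16_t cons_idx; /* last completed WQE, advanced on completion */
	uint16_t delta;    /* free WQEBBs */
};

static uint16_t sq_free_wqebbs(const struct sq_wq *wq)
{
	return wq->delta - 1;            /* one WQEBB kept in reserve */
}

static bool sq_is_empty(const struct sq_wq *wq)
{
	return wq->delta == wq->q_depth; /* nothing posted, nothing pending */
}

static void sq_post(struct sq_wq *wq, uint16_t wqebb_cnt)
{
	wq->prod_idx = (wq->prod_idx + wqebb_cnt) & wq->mask;
	wq->delta -= wqebb_cnt;
}

int main(void)
{
	struct sq_wq wq = { .q_depth = 256, .mask = 255, .delta = 256 };

	printf("empty=%d free=%u\n", sq_is_empty(&wq), (unsigned)sq_free_wqebbs(&wq));
	sq_post(&wq, 4);
	printf("empty=%d free=%u\n", sq_is_empty(&wq), (unsigned)sq_free_wqebbs(&wq));
	return 0;
}
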
