
Searched refs:nb_rx_desc (Results 1 – 25 of 104) sorted by relevance


/f-stack/dpdk/drivers/net/i40e/
i40e_rxtx_vec_common.h
147 const unsigned mask = rxq->nb_rx_desc - 1; in _i40e_rx_queue_release_mbufs_vec()
150 if (rxq->sw_ring == NULL || rxq->rxrearm_nb >= rxq->nb_rx_desc) in _i40e_rx_queue_release_mbufs_vec()
155 for (i = 0; i < rxq->nb_rx_desc; i++) { in _i40e_rx_queue_release_mbufs_vec()
168 rxq->rxrearm_nb = rxq->nb_rx_desc; in _i40e_rx_queue_release_mbufs_vec()
171 memset(rxq->sw_ring, 0, sizeof(rxq->sw_ring[0]) * rxq->nb_rx_desc); in _i40e_rx_queue_release_mbufs_vec()
226 desc = rxq->nb_rx_desc; in i40e_rx_vec_dev_conf_condition_check_default()
243 desc = rxq->nb_rx_desc; in i40e_rx_vec_dev_conf_condition_check_default()
i40e_rxtx.c
816 if (rx_id == rxq->nb_rx_desc) in i40e_recv_scattered_pkts()
1891 rxq->nb_rx_desc = nb_desc; in i40e_dev_rx_queue_setup()
2017 while ((desc < rxq->nb_rx_desc) && in i40e_dev_rx_queue_count()
2030 desc - rxq->nb_rx_desc]); in i40e_dev_rx_queue_count()
2050 if (desc >= rxq->nb_rx_desc) in i40e_dev_rx_descriptor_done()
2051 desc -= rxq->nb_rx_desc; in i40e_dev_rx_descriptor_done()
2077 if (desc >= rxq->nb_rx_desc) in i40e_dev_rx_descriptor_status()
2078 desc -= rxq->nb_rx_desc; in i40e_dev_rx_descriptor_status()
2467 len = rxq->nb_rx_desc; in i40e_reset_rx_queue()
2847 rx_ctx.qlen = rxq->nb_rx_desc; in i40e_rx_queue_init()
[all …]
i40e_rxtx_vec_altivec.c
42 rxq->nb_rx_desc) { in i40e_rxq_rearm()
92 if (rxq->rxrearm_start >= rxq->nb_rx_desc) in i40e_rxq_rearm()
98 (rxq->nb_rx_desc - 1) : (rxq->rxrearm_start - 1)); in i40e_rxq_rearm()
443 rxq->rx_tail = (uint16_t)(rxq->rx_tail & (rxq->nb_rx_desc - 1)); in _recv_raw_pkts_vec()
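The i40e hits above already show the two forms of ring-index arithmetic that recur throughout these results: the scalar receive path resets its index to zero once it reaches nb_rx_desc (i40e_recv_scattered_pkts at line 816), while the descriptor_done/descriptor_status helpers wrap a computed index by subtracting the ring size. A minimal sketch of both in C, using invented struct and function names rather than the driver's real rxq layout:

#include <stdint.h>

/* Illustrative ring state; field names are placeholders, not i40e's. */
struct rx_ring {
    uint16_t nb_rx_desc;   /* number of descriptors in the ring */
    uint16_t rx_tail;      /* next descriptor the driver will look at */
};

/* Advance the tail by one, wrapping to 0 at the end of the ring,
 * like the "if (rx_id == rxq->nb_rx_desc)" test above. */
static inline void rx_advance(struct rx_ring *r)
{
    uint16_t next = r->rx_tail + 1;
    if (next == r->nb_rx_desc)
        next = 0;
    r->rx_tail = next;
}

/* Turn an offset from the tail into a ring index; a single subtraction
 * suffices because offset is assumed to stay below nb_rx_desc, matching
 * the descriptor_done/descriptor_status wrap above. */
static inline uint16_t rx_offset_to_index(const struct rx_ring *r, uint16_t offset)
{
    uint16_t desc = r->rx_tail + offset;
    if (desc >= r->nb_rx_desc)
        desc -= r->nb_rx_desc;
    return desc;
}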
/f-stack/dpdk/drivers/net/iavf/
iavf_rxtx_vec_common.h
148 const unsigned int mask = rxq->nb_rx_desc - 1; in _iavf_rx_queue_release_mbufs_vec()
151 if (!rxq->sw_ring || rxq->rxrearm_nb >= rxq->nb_rx_desc) in _iavf_rx_queue_release_mbufs_vec()
156 for (i = 0; i < rxq->nb_rx_desc; i++) { in _iavf_rx_queue_release_mbufs_vec()
169 rxq->rxrearm_nb = rxq->nb_rx_desc; in _iavf_rx_queue_release_mbufs_vec()
172 memset(rxq->sw_ring, 0, sizeof(rxq->sw_ring[0]) * rxq->nb_rx_desc); in _iavf_rx_queue_release_mbufs_vec()
218 if (!rte_is_power_of_2(rxq->nb_rx_desc)) in iavf_rx_vec_queue_default()
224 if (rxq->nb_rx_desc % rxq->rx_free_thresh) in iavf_rx_vec_queue_default()
iavf_rxtx.c
246 for (i = 0; i < rxq->nb_rx_desc; i++) { in alloc_rxq_mbufs()
284 for (i = 0; i < rxq->nb_rx_desc; i++) { in release_rxq_mbufs()
549 rxq->nb_rx_desc = nb_desc; in iavf_dev_rx_queue_setup()
1351 if (rx_id == rxq->nb_rx_desc) in iavf_recv_scattered_pkts_flex_rxd()
1504 if (rx_id == rxq->nb_rx_desc) in iavf_recv_scattered_pkts()
1887 if (rxq->rx_tail >= rxq->nb_rx_desc) in rx_recv_pkts()
2552 qinfo->nb_desc = rxq->nb_rx_desc; in iavf_dev_rxq_info_get()
2587 while ((desc < rxq->nb_rx_desc) && in iavf_dev_rxq_count()
2599 desc - rxq->nb_rx_desc]); in iavf_dev_rxq_count()
2620 if (desc >= rxq->nb_rx_desc) in iavf_dev_rx_desc_status()
[all …]
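The iavf_rx_vec_queue_default() checks listed above make explicit why the vectorized receive paths constrain the ring size: nb_rx_desc must be a power of two so that nb_rx_desc - 1 can serve as a wrap mask (as in every *_rx_queue_release_mbufs_vec helper), and it must be a multiple of rx_free_thresh so the ring splits into whole rearm bursts. The ice and ixgbe results below repeat the same tests. A rough sketch of those checks in C, with placeholder names that are not part of the iavf API:

#include <stdbool.h>
#include <stdint.h>

/* Mirrors the shape of the *_rx_vec_queue_default() tests above;
 * the function name and parameters are invented for illustration. */
static bool rx_vec_ring_ok(uint16_t nb_rx_desc, uint16_t rx_free_thresh)
{
    /* power of two, so (nb_rx_desc - 1) works as a bit mask */
    if (nb_rx_desc == 0 || (nb_rx_desc & (nb_rx_desc - 1)) != 0)
        return false;
    /* the ring must divide evenly into free/rearm bursts */
    if (rx_free_thresh == 0 || (nb_rx_desc % rx_free_thresh) != 0)
        return false;
    return true;
}

/* With a power-of-two ring, the wrap is a single AND instead of a compare. */
static inline uint16_t rx_wrap(uint16_t idx, uint16_t nb_rx_desc)
{
    return idx & (uint16_t)(nb_rx_desc - 1);
}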
/f-stack/dpdk/drivers/net/ice/
ice_rxtx_vec_common.h
145 const unsigned int mask = rxq->nb_rx_desc - 1; in _ice_rx_queue_release_mbufs_vec()
153 if (rxq->rxrearm_nb >= rxq->nb_rx_desc) in _ice_rx_queue_release_mbufs_vec()
158 for (i = 0; i < rxq->nb_rx_desc; i++) { in _ice_rx_queue_release_mbufs_vec()
171 rxq->rxrearm_nb = rxq->nb_rx_desc; in _ice_rx_queue_release_mbufs_vec()
174 memset(rxq->sw_ring, 0, sizeof(rxq->sw_ring[0]) * rxq->nb_rx_desc); in _ice_rx_queue_release_mbufs_vec()
251 if (!rte_is_power_of_2(rxq->nb_rx_desc)) in ice_rx_vec_queue_default()
257 if (rxq->nb_rx_desc % rxq->rx_free_thresh) in ice_rx_vec_queue_default()
ice_rxtx.c
273 rx_ctx.qlen = rxq->nb_rx_desc; in ice_program_hw_rx_queue()
360 for (i = 0; i < rxq->nb_rx_desc; i++) { in ice_alloc_rx_queue_mbufs()
402 for (i = 0; i < rxq->nb_rx_desc; i++) { in _ice_rx_queue_release_mbufs()
699 rx_ctx.qlen = rxq->nb_rx_desc; in ice_fdir_program_hw_rx_queue()
1061 rxq->nb_rx_desc = nb_desc; in ice_rx_queue_setup()
1363 qinfo->nb_desc = rxq->nb_rx_desc; in ice_rxq_info_get()
1400 while ((desc < rxq->nb_rx_desc) && in ice_rx_queue_count()
1412 desc - rxq->nb_rx_desc]); in ice_rx_queue_count()
1674 if (rxq->rx_tail >= rxq->nb_rx_desc) in rx_recv_pkts()
1985 if (desc >= rxq->nb_rx_desc) in ice_rx_descriptor_status()
[all …]
/f-stack/dpdk/drivers/net/atlantic/
atl_rxtx.c
57 uint16_t nb_rx_desc; member
92 for (i = 0; i < rxq->nb_rx_desc; i++) { in atl_reset_rx_queue()
113 if (nb_rx_desc < AQ_HW_MIN_RX_RING_SIZE || in atl_rx_queue_setup()
114 nb_rx_desc > AQ_HW_MAX_RX_RING_SIZE) { in atl_rx_queue_setup()
142 rxq->nb_rx_desc = nb_rx_desc; in atl_rx_queue_setup()
375 rxq->nb_rx_desc, buff_size, 0, in atl_rx_init()
403 for (i = 0; i < rxq->nb_rx_desc; i++) { in atl_alloc_rx_queue_mbufs()
673 qinfo->nb_desc = rxq->nb_rx_desc; in atl_rxq_info_get()
728 if (idx >= rxq->nb_rx_desc) in atl_dev_rx_descriptor_status()
729 idx -= rxq->nb_rx_desc; in atl_dev_rx_descriptor_status()
[all …]
/f-stack/dpdk/drivers/net/hns3/
hns3_rxtx_vec.c
92 if (rxq->rx_rearm_start >= rxq->nb_rx_desc) in hns3_rxq_rearm_mbuf()
155 struct hns3_entry *sw_ring = &rxq->sw_ring[rxq->nb_rx_desc]; in hns3_rxq_vec_setup()
158 memset(&rxq->rx_ring[rxq->nb_rx_desc], 0, in hns3_rxq_vec_setup()
177 if (rxq->nb_rx_desc < min_vec_bds) in hns3_rxq_vec_check()
180 if (rxq->nb_rx_desc % HNS3_DEFAULT_RXQ_REARM_THRESH) in hns3_rxq_vec_check()
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_rxtx_vec_common.h
171 const unsigned int mask = rxq->nb_rx_desc - 1; in _ixgbe_rx_queue_release_mbufs_vec()
174 if (rxq->sw_ring == NULL || rxq->rxrearm_nb >= rxq->nb_rx_desc) in _ixgbe_rx_queue_release_mbufs_vec()
179 for (i = 0; i < rxq->nb_rx_desc; i++) { in _ixgbe_rx_queue_release_mbufs_vec()
192 rxq->rxrearm_nb = rxq->nb_rx_desc; in _ixgbe_rx_queue_release_mbufs_vec()
195 memset(rxq->sw_ring, 0, sizeof(rxq->sw_ring[0]) * rxq->nb_rx_desc); in _ixgbe_rx_queue_release_mbufs_vec()
ixgbe_vf_representor.c
93 __rte_unused uint16_t nb_rx_desc, in ixgbe_vf_representor_rx_queue_setup() argument
104 __rte_unused uint16_t nb_rx_desc, in ixgbe_vf_representor_tx_queue_setup() argument
ixgbe_rxtx.c
1829 if (rx_id == rxq->nb_rx_desc) in ixgbe_recv_pkts()
2122 if (next_id == rxq->nb_rx_desc) in ixgbe_recv_pkts_lro()
2902 uint16_t len = rxq->nb_rx_desc; in ixgbe_reset_rx_queue()
3065 rxq->nb_rx_desc = nb_desc; in ixgbe_dev_rx_queue_setup()
3212 while ((desc < rxq->nb_rx_desc) && in ixgbe_dev_rx_queue_count()
3219 desc - rxq->nb_rx_desc]); in ixgbe_dev_rx_queue_count()
3235 if (desc >= rxq->nb_rx_desc) in ixgbe_dev_rx_descriptor_done()
3236 desc -= rxq->nb_rx_desc; in ixgbe_dev_rx_descriptor_done()
3263 if (desc >= rxq->nb_rx_desc) in ixgbe_dev_rx_descriptor_status()
3264 desc -= rxq->nb_rx_desc; in ixgbe_dev_rx_descriptor_status()
[all …]
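ixgbe_dev_rx_queue_count() and the descriptor_done/descriptor_status helpers above use nb_rx_desc the same way the earlier drivers do: the scan is bounded by the ring size, and an index that runs past the end is pulled back by subtraction. A simplified sketch of such a count loop in C; the real drivers test a hardware DD bit and advance in fixed scan intervals, so the per-descriptor step and the done flag here are invented for illustration:

#include <stdint.h>

/* Hypothetical descriptor with a "done" flag standing in for the DD bit. */
struct rx_desc {
    volatile uint8_t done;
};

struct rx_queue {
    struct rx_desc *rx_ring;  /* descriptor ring of nb_rx_desc entries */
    uint16_t nb_rx_desc;
    uint16_t rx_tail;
};

/* Count completed descriptors past the tail, never scanning more than
 * one full ring and wrapping the index as in the hits above. */
static uint16_t rx_queue_count_sketch(const struct rx_queue *q)
{
    uint16_t desc = 0;
    uint16_t idx = q->rx_tail;

    while (desc < q->nb_rx_desc && q->rx_ring[idx].done) {
        desc++;
        idx++;
        if (idx >= q->nb_rx_desc)
            idx -= q->nb_rx_desc;
    }
    return desc;
}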
/f-stack/dpdk/drivers/net/e1000/
em_rxtx.c
762 if (rx_id == rxq->nb_rx_desc) in eth_em_recv_pkts()
941 if (rx_id == rxq->nb_rx_desc) in eth_em_recv_scattered_pkts()
1464 rxq->nb_rx_desc = nb_desc; in eth_em_rx_queue_setup()
1502 while ((desc < rxq->nb_rx_desc) && in eth_em_rx_queue_count()
1508 desc - rxq->nb_rx_desc]); in eth_em_rx_queue_count()
1524 if (desc >= rxq->nb_rx_desc) in eth_em_rx_descriptor_done()
1525 desc -= rxq->nb_rx_desc; in eth_em_rx_descriptor_done()
1545 if (desc >= rxq->nb_rx_desc) in eth_em_rx_descriptor_status()
1546 desc -= rxq->nb_rx_desc; in eth_em_rx_descriptor_status()
1816 rxq->nb_rx_desc * in eth_em_rx_init()
[all …]
igb_rxtx.c
889 if (rx_id == rxq->nb_rx_desc) in eth_igb_recv_pkts()
1080 if (rx_id == rxq->nb_rx_desc) in eth_igb_recv_scattered_pkts()
1719 rxq->nb_rx_desc = nb_desc; in eth_igb_rx_queue_setup()
1788 desc - rxq->nb_rx_desc]); in eth_igb_rx_queue_count()
1804 if (desc >= rxq->nb_rx_desc) in eth_igb_rx_descriptor_done()
1805 desc -= rxq->nb_rx_desc; in eth_igb_rx_descriptor_done()
1825 if (desc >= rxq->nb_rx_desc) in eth_igb_rx_descriptor_status()
1826 desc -= rxq->nb_rx_desc; in eth_igb_rx_descriptor_status()
2394 rxq->nb_rx_desc * in eth_igb_rx_init()
2684 rxq->nb_rx_desc * in eth_igbvf_rx_init()
[all …]
/f-stack/dpdk/drivers/net/bnx2x/
bnx2x_rxtx.h
34 uint16_t nb_rx_desc; /**< number of RX descriptors. */ member
67 uint16_t nb_rx_desc, unsigned int socket_id,
bnx2x_rxtx.c
29 for (i = 0; i < rx_queue->nb_rx_desc; i++) { in bnx2x_rx_queue_release()
80 rxq->nb_rx_desc = TOTAL_RX_BD(rxq); in bnx2x_dev_rx_queue_setup()
91 dma_size = rxq->nb_rx_desc * sizeof(struct eth_rx_bd); in bnx2x_dev_rx_queue_setup()
110 dma_size = rxq->nb_rx_desc * sizeof(struct bnx2x_rx_entry); in bnx2x_dev_rx_queue_setup()
121 for (idx = 0; idx < rxq->nb_rx_desc; idx = NEXT_RX_BD(idx)) { in bnx2x_dev_rx_queue_setup()
136 rxq->rx_bd_tail = rxq->nb_rx_desc; in bnx2x_dev_rx_queue_setup()
/f-stack/dpdk/drivers/net/igc/
igc_txrx.c
424 if (rx_id >= rxq->nb_rx_desc) in igc_recv_pkts()
568 if (rx_id >= rxq->nb_rx_desc) in igc_recv_scattered_pkts()
750 while (desc < rxq->nb_rx_desc && in eth_igc_rx_queue_count()
769 if (desc >= rxq->nb_rx_desc) in eth_igc_rx_descriptor_done()
770 desc -= rxq->nb_rx_desc; in eth_igc_rx_descriptor_done()
790 if (desc >= rxq->nb_rx_desc) in eth_igc_rx_descriptor_status()
791 desc -= rxq->nb_rx_desc; in eth_igc_rx_descriptor_status()
1137 rxq->nb_rx_desc * in igc_rx_init()
1296 rxq->nb_rx_desc - 1); in igc_rx_init()
1367 rxq->nb_rx_desc = nb_desc; in eth_igc_rx_queue_setup()
[all …]
igc_txrx.h
21 uint16_t nb_rx_desc, unsigned int socket_id,
/f-stack/dpdk/lib/librte_ethdev/
rte_ethdev_trace.h
42 uint16_t nb_rx_desc, void *mp,
46 rte_trace_point_emit_u16(nb_rx_desc);
/f-stack/dpdk/drivers/raw/ntb/
ntb.c
282 for (i = 0; i < q->nb_rx_desc; i++) { in ntb_rxq_release_mbufs()
332 rxq->nb_rx_desc = rxq_conf->nb_desc; in ntb_rxq_setup()
342 rxq->nb_rx_desc, in ntb_rxq_setup()
537 for (i = 0; i < rxq->nb_rx_desc - 1; i++) { in ntb_queue_init()
551 *rxq->avail_cnt = rxq->nb_rx_desc - 1; in ntb_queue_init()
552 rxq->last_avail = rxq->nb_rx_desc - 1; in ntb_queue_init()
768 (rxq->nb_rx_desc - 1); in ntb_dequeue_bufs()
786 (rxq->nb_rx_desc - 1); in ntb_dequeue_bufs()
798 if (nb_mbufs > rxq->nb_rx_desc - last_avail) { in ntb_dequeue_bufs()
799 nb1 = rxq->nb_rx_desc - last_avail; in ntb_dequeue_bufs()
[all …]
/f-stack/dpdk/drivers/net/bnxt/
bnxt_rxtx_vec_common.h
68 rxq->nb_rx_desc - rxq->rxrearm_start); in bnxt_rxq_rearm()
92 if (rxq->rxrearm_start >= rxq->nb_rx_desc) in bnxt_rxq_rearm()
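bnxt_rxq_rearm() above, like i40e_rxq_rearm() and hns3_rxq_rearm_mbuf() earlier in the results, uses nb_rx_desc twice: to clamp a refill burst so it does not run past the end of the ring, and to wrap the rearm cursor back to slot zero. A hedged sketch of that bookkeeping only, with invented names and sizes, and without the mempool allocation and descriptor writes the real functions perform:

#include <stdint.h>
#include <string.h>

#define REARM_BURST 32  /* illustrative rearm threshold, not a DPDK constant */

struct rearm_ring {
    void    *sw_ring[1024];   /* software ring of buffer pointers (example size) */
    uint16_t nb_rx_desc;      /* ring size, assumed a multiple of REARM_BURST */
    uint16_t rxrearm_start;   /* first slot waiting for a fresh buffer */
    uint16_t rxrearm_nb;      /* number of slots still waiting to be refilled */
};

/* Store up to REARM_BURST fresh buffers, stopping at the ring end; the
 * cursor wraps to 0 once it reaches nb_rx_desc, as in the rearm hits above. */
static void rxq_rearm_sketch(struct rearm_ring *r, void *bufs[REARM_BURST])
{
    uint16_t n = REARM_BURST;

    if (n > (uint16_t)(r->nb_rx_desc - r->rxrearm_start))
        n = r->nb_rx_desc - r->rxrearm_start;

    memcpy(&r->sw_ring[r->rxrearm_start], bufs, n * sizeof(bufs[0]));

    r->rxrearm_start += n;
    if (r->rxrearm_start >= r->nb_rx_desc)
        r->rxrearm_start = 0;
    r->rxrearm_nb -= n;
}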
bnxt_rxq.h
21 uint16_t nb_rx_desc; /* num of RX desc */ member
/f-stack/dpdk/drivers/net/enetc/
enetc_ethdev.c
367 uint16_t nb_rx_desc) in enetc_alloc_rxbdr() argument
371 size = nb_rx_desc * sizeof(struct enetc_swbd); in enetc_alloc_rxbdr()
376 size = nb_rx_desc * sizeof(union enetc_rx_bd); in enetc_alloc_rxbdr()
384 rxr->bd_count = nb_rx_desc; in enetc_alloc_rxbdr()
422 uint16_t nb_rx_desc, in enetc_rx_queue_setup() argument
435 if (nb_rx_desc > MAX_BD_COUNT) in enetc_rx_queue_setup()
445 err = enetc_alloc_rxbdr(rx_ring, nb_rx_desc); in enetc_rx_queue_setup()
/f-stack/dpdk/drivers/net/txgbe/
txgbe_rxtx.c
1323 if (rxq->rx_tail >= rxq->nb_rx_desc) in txgbe_rx_recv_pkts()
1449 if (rx_id == rxq->nb_rx_desc) in txgbe_recv_pkts()
1721 if (next_id == rxq->nb_rx_desc) in txgbe_recv_pkts_lro()
2352 for (i = 0; i < rxq->nb_rx_desc; i++) in txgbe_rx_queue_release_mbufs()
2427 uint16_t len = rxq->nb_rx_desc; in txgbe_reset_rx_queue()
2507 rxq->nb_rx_desc = nb_desc; in txgbe_dev_rx_queue_setup()
2632 while ((desc < rxq->nb_rx_desc) && in txgbe_dev_rx_queue_count()
2639 desc - rxq->nb_rx_desc]); in txgbe_dev_rx_queue_count()
2660 if (desc >= rxq->nb_rx_desc) in txgbe_dev_rx_descriptor_status()
2661 desc -= rxq->nb_rx_desc; in txgbe_dev_rx_descriptor_status()
[all …]
/f-stack/dpdk/drivers/net/qede/
qede_rxtx.h
19 #define NUM_RX_BDS(q) (q->nb_rx_desc - 1)
191 uint16_t nb_rx_desc; member
