Searched refs:rx_queue (Results 1 – 25 of 149) sorted by relevance

/f-stack/dpdk/drivers/net/nfb/
nfb_stats.c
23 struct ndp_rx_queue *rx_queue = *((struct ndp_rx_queue **) in nfb_eth_stats_get() local
30 stats->q_ipackets[i] = rx_queue[i].rx_pkts; in nfb_eth_stats_get()
31 stats->q_ibytes[i] = rx_queue[i].rx_bytes; in nfb_eth_stats_get()
33 rx_total += rx_queue[i].rx_pkts; in nfb_eth_stats_get()
34 rx_total_bytes += rx_queue[i].rx_bytes; in nfb_eth_stats_get()
62 struct ndp_rx_queue *rx_queue = *((struct ndp_rx_queue **) in nfb_eth_stats_reset() local
68 rx_queue[i].rx_pkts = 0; in nfb_eth_stats_reset()
69 rx_queue[i].rx_bytes = 0; in nfb_eth_stats_reset()
70 rx_queue[i].err_pkts = 0; in nfb_eth_stats_reset()
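
The nfb snippets above accumulate per-queue counters into the standard ethdev statistics layout. A minimal sketch of how an application would read and then clear those counters through the public API (it assumes the port is already configured and started):

    #include <inttypes.h>
    #include <stdio.h>
    #include <rte_ethdev.h>

    /* Read the per-queue RX counters filled in by the PMD, then reset them. */
    static void dump_and_reset_rx_stats(uint16_t port_id)
    {
        struct rte_eth_stats stats;
        uint16_t q;

        if (rte_eth_stats_get(port_id, &stats) != 0)
            return;

        for (q = 0; q < RTE_ETHDEV_QUEUE_STAT_CNTRS; q++)
            printf("rxq %u: %" PRIu64 " pkts, %" PRIu64 " bytes\n",
                   (unsigned int)q, stats.q_ipackets[q], stats.q_ibytes[q]);

        /* drives the driver's stats_reset callback, which zeroes
         * rx_pkts/rx_bytes/err_pkts as in the snippet above */
        rte_eth_stats_reset(port_id);
    }
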
/f-stack/dpdk/drivers/net/af_packet/
rte_eth_af_packet.c
79 struct pkt_rx_queue *rx_queue; member
294 internals->rx_queue[i].sockfd = -1; in eth_dev_stop()
367 internal->rx_queue[i].rx_pkts = 0; in eth_stats_reset()
396 munmap(internals->rx_queue[q].map, in eth_dev_close()
402 rte_free(internals->rx_queue); in eth_dev_close()
626 struct pkt_rx_queue *rx_queue; in rte_pmd_init_internals() local
780 rx_queue = &((*internals)->rx_queue[q]); in rte_pmd_init_internals()
797 if (rx_queue->rd == NULL) in rte_pmd_init_internals()
800 rx_queue->rd[i].iov_base = rx_queue->map + (i * framesize); in rte_pmd_init_internals()
803 rx_queue->sockfd = qsockfd; in rte_pmd_init_internals()
[all …]
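
The af_packet PMD's queue setup (lines 780–803 above) maps a kernel PACKET_RX_RING and points one iovec at each frame slot. A standalone sketch of that Linux interface, with illustrative ring sizes rather than the PMD's actual configuration:

    #include <stdint.h>
    #include <stdlib.h>
    #include <sys/mman.h>
    #include <sys/socket.h>
    #include <sys/uio.h>
    #include <linux/if_packet.h>

    /* Map a PACKET_RX_RING on an AF_PACKET socket and build one iovec per
     * frame, mirroring the rx_queue->rd[i] setup above. Sizes are assumed. */
    static int setup_rx_ring(int sockfd, struct iovec **rd_out, uint8_t **map_out)
    {
        struct tpacket_req req = {
            .tp_block_size = 4096, .tp_block_nr = 64,
            .tp_frame_size = 2048, .tp_frame_nr  = 128, /* 2 frames/block * 64 */
        };
        size_t len = (size_t)req.tp_block_size * req.tp_block_nr;
        uint8_t *map;
        struct iovec *rd;
        unsigned int i;

        if (setsockopt(sockfd, SOL_PACKET, PACKET_RX_RING, &req, sizeof(req)) < 0)
            return -1;

        map = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, sockfd, 0);
        if (map == MAP_FAILED)
            return -1;

        rd = calloc(req.tp_frame_nr, sizeof(*rd));
        if (rd == NULL) {
            munmap(map, len);
            return -1;
        }
        for (i = 0; i < req.tp_frame_nr; i++) {
            rd[i].iov_base = map + (size_t)i * req.tp_frame_size;
            rd[i].iov_len  = req.tp_frame_size;
        }
        *rd_out = rd;
        *map_out = map;
        return 0;
    }
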
/f-stack/dpdk/drivers/net/virtio/
virtio_ethdev.h
80 uint16_t virtio_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
82 uint16_t virtio_recv_pkts_packed(void *rx_queue, struct rte_mbuf **rx_pkts,
85 uint16_t virtio_recv_mergeable_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
88 uint16_t virtio_recv_mergeable_pkts_packed(void *rx_queue,
91 uint16_t virtio_recv_pkts_inorder(void *rx_queue,
105 uint16_t virtio_recv_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts,
108 uint16_t virtio_recv_pkts_packed_vec(void *rx_queue, struct rte_mbuf **rx_pkts,
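
Every prototype above follows the ethdev rx-burst signature (rx_queue, rx_pkts, nb_pkts); the PMD picks one variant at setup time, and applications only reach it through rte_eth_rx_burst. A minimal polling sketch, assuming port 0 / queue 0 are already set up:

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    #define BURST_SIZE 32

    /* Pull one burst from RX queue 0 of port 0 and drop it; the PMD dispatches
     * to whichever virtio_recv_pkts* variant it selected at configure time. */
    static void rx_poll_once(void)
    {
        struct rte_mbuf *pkts[BURST_SIZE];
        uint16_t i, nb;

        nb = rte_eth_rx_burst(0 /* port */, 0 /* queue */, pkts, BURST_SIZE);
        for (i = 0; i < nb; i++)
            rte_pktmbuf_free(pkts[i]);
    }
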
/f-stack/dpdk/drivers/net/iavf/
iavf_rxtx.h
414 uint16_t iavf_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
416 uint16_t iavf_recv_pkts_flex_rxd(void *rx_queue,
419 uint16_t iavf_recv_scattered_pkts(void *rx_queue,
422 uint16_t iavf_recv_scattered_pkts_flex_rxd(void *rx_queue,
436 int iavf_dev_rx_desc_status(void *rx_queue, uint16_t offset);
443 uint16_t iavf_recv_scattered_pkts_vec(void *rx_queue,
446 uint16_t iavf_recv_scattered_pkts_vec_flex_rxd(void *rx_queue,
453 uint16_t iavf_recv_pkts_vec_avx2_flex_rxd(void *rx_queue,
456 uint16_t iavf_recv_scattered_pkts_vec_avx2(void *rx_queue,
472 uint16_t iavf_recv_pkts_vec_avx512_flex_rxd(void *rx_queue,
[all …]
iavf_rxtx_vec_sse.c
929 iavf_recv_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts, in iavf_recv_pkts_vec() argument
932 return _recv_raw_pkts_vec(rx_queue, rx_pkts, nb_pkts, NULL); in iavf_recv_pkts_vec()
941 iavf_recv_pkts_vec_flex_rxd(void *rx_queue, struct rte_mbuf **rx_pkts, in iavf_recv_pkts_vec_flex_rxd() argument
957 struct iavf_rx_queue *rxq = rx_queue; in iavf_recv_scattered_burst_vec()
1000 burst = iavf_recv_scattered_burst_vec(rx_queue, in iavf_recv_scattered_pkts_vec()
1009 return retval + iavf_recv_scattered_burst_vec(rx_queue, in iavf_recv_scattered_pkts_vec()
1022 iavf_recv_scattered_burst_vec_flex_rxd(void *rx_queue, in iavf_recv_scattered_burst_vec_flex_rxd() argument
1026 struct iavf_rx_queue *rxq = rx_queue; in iavf_recv_scattered_burst_vec_flex_rxd()
1061 iavf_recv_scattered_pkts_vec_flex_rxd(void *rx_queue, in iavf_recv_scattered_pkts_vec_flex_rxd() argument
1070 burst = iavf_recv_scattered_burst_vec_flex_rxd(rx_queue, in iavf_recv_scattered_pkts_vec_flex_rxd()
[all …]
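
iavf_recv_scattered_pkts_vec (line 1000 above) slices a large request into fixed-size vector bursts and returns early when a burst comes back short, i.e. the ring is drained. The control flow, stripped of driver internals; the callback type and names here are assumptions for illustration:

    #include <stdint.h>

    /* Hypothetical burst callback with the same shape as the driver's
     * *_recv_scattered_burst_vec() helpers. */
    typedef uint16_t (*recv_burst_t)(void *rxq, void **pkts, uint16_t nb_pkts);

    static uint16_t recv_in_bursts(void *rxq, void **pkts, uint16_t nb_pkts,
                                   recv_burst_t recv_burst, uint16_t burst_sz)
    {
        uint16_t retval = 0;

        while (nb_pkts > burst_sz) {
            uint16_t burst = recv_burst(rxq, pkts + retval, burst_sz);

            retval += burst;
            nb_pkts -= burst;
            if (burst < burst_sz)   /* short burst: nothing left to receive */
                return retval;
        }
        return retval + recv_burst(rxq, pkts + retval, nb_pkts);
    }
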
/f-stack/dpdk/drivers/net/thunderx/
nicvf_rxtx.h
93 uint16_t nicvf_recv_pkts_vlan_strip(void *rx_queue, struct rte_mbuf **rx_pkts,
95 uint16_t nicvf_recv_pkts_cksum_vlan_strip(void *rx_queue,
98 uint16_t nicvf_recv_pkts_multiseg_no_offload(void *rx_queue,
100 uint16_t nicvf_recv_pkts_multiseg_cksum(void *rx_queue,
102 uint16_t nicvf_recv_pkts_multiseg_vlan_strip(void *rx_queue,
104 uint16_t nicvf_recv_pkts_multiseg_cksum_vlan_strip(void *rx_queue,
nicvf_rxtx.c
428 struct nicvf_rxq *rxq = rx_queue; in nicvf_recv_pkts()
491 return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_no_offload()
499 return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_cksum()
507 return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_vlan_strip()
515 return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_cksum_vlan_strip()
581 struct nicvf_rxq *rxq = rx_queue; in nicvf_recv_pkts_multiseg()
623 return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_multiseg_no_offload()
631 return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_multiseg_cksum()
639 return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts, in nicvf_recv_pkts_multiseg_vlan_strip()
644 nicvf_recv_pkts_multiseg_cksum_vlan_strip(void *rx_queue, in nicvf_recv_pkts_multiseg_cksum_vlan_strip() argument
[all …]
/f-stack/dpdk/examples/l3fwd-power/
main.c
871 port_id = rx_queue->port_id; in turn_on_off_intr()
894 portid = rx_queue->port_id; in event_register()
981 rx_queue->idle_hint = 0; in main_intr_loop()
982 portid = rx_queue->port_id; in main_intr_loop()
1137 portid = rx_queue->port_id; in main_telemetry_loop()
1251 rx_queue->idle_hint = 0; in main_empty_poll_loop()
1252 portid = rx_queue->port_id; in main_empty_poll_loop()
1378 rx_queue->idle_hint = 0; in main_legacy_loop()
1379 portid = rx_queue->port_id; in main_legacy_loop()
1410 rx_queue->freq_up_hint = in main_legacy_loop()
[all …]
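
In the l3fwd-power loops above, each polled queue records an idle_hint and a freq_up_hint that the lcore later turns into sleep and frequency-scaling decisions. A simplified version of that heuristic; the struct layout and thresholds below are assumptions, not the example's exact code:

    #include <stdint.h>

    /* Hypothetical mirror of the per-queue bookkeeping kept by each lcore. */
    struct lcore_rx_queue_hint {
        uint16_t port_id;
        uint32_t idle_hint;     /* suggested micro-sleep when the queue is idle */
        uint32_t freq_up_hint;  /* pressure to raise the core frequency */
    };

    static void update_hints(struct lcore_rx_queue_hint *rx_queue,
                             uint16_t nb_rx, uint16_t burst_size)
    {
        if (nb_rx == 0) {
            rx_queue->idle_hint = 100;  /* sleep a little; value is illustrative */
            rx_queue->freq_up_hint = 0;
        } else {
            rx_queue->idle_hint = 0;    /* traffic present, keep polling */
            /* near-full bursts suggest the core is falling behind */
            rx_queue->freq_up_hint = (nb_rx >= burst_size - burst_size / 4);
        }
    }
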
/f-stack/dpdk/drivers/crypto/qat/
qat_sym_hw_dp.c
749 head = (head + rx_queue->msg_size) & rx_queue->modulo_mask; in qat_sym_dp_dequeue_burst()
768 head = (head + rx_queue->msg_size) & in qat_sym_dp_dequeue_burst()
769 rx_queue->modulo_mask; in qat_sym_dp_dequeue_burst()
782 head = (head + rx_queue->msg_size) & in qat_sym_dp_dequeue_burst()
783 rx_queue->modulo_mask; in qat_sym_dp_dequeue_burst()
810 rx_queue->modulo_mask; in qat_sym_dp_dequeue()
854 rx_queue->head = dp_ctx->head; in qat_sym_dp_update_head()
862 old_head = rx_queue->csr_head; in qat_sym_dp_update_head()
863 new_head = rx_queue->head; in qat_sym_dp_update_head()
879 rx_queue->csr_head = new_head; in qat_sym_dp_update_head()
[all …]
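
The QAT response ring is a power-of-two number of bytes, so the consumer head advances by the fixed message size and wraps with a bitmask instead of a modulo. A minimal self-contained sketch of that index arithmetic, with assumed sizes:

    #include <assert.h>
    #include <stdint.h>

    /* Advance a byte-addressed ring head by one fixed-size message, wrapping
     * with a mask; valid only when the ring size is a power of two. */
    static inline uint32_t ring_advance(uint32_t head, uint32_t msg_size,
                                        uint32_t modulo_mask)
    {
        return (head + msg_size) & modulo_mask;  /* same expression as above */
    }

    int main(void)
    {
        const uint32_t ring_bytes = 4096, msg_size = 64;  /* assumed sizes */
        const uint32_t modulo_mask = ring_bytes - 1;
        uint32_t head = ring_bytes - msg_size;            /* last slot */

        assert(ring_advance(head, msg_size, modulo_mask) == 0);  /* wraps to 0 */
        return 0;
    }
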
/f-stack/dpdk/drivers/net/ark/
ark_ethdev_rx.h
27 uint16_t eth_ark_recv_pkts_noop(void *rx_queue, struct rte_mbuf **rx_pkts,
29 uint16_t eth_ark_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
31 void eth_ark_dev_rx_queue_release(void *rx_queue);
/f-stack/dpdk/drivers/net/octeontx2/
otx2_rx.c
45 nix_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts, in nix_recv_pkts() argument
48 struct otx2_eth_rxq *rxq = rx_queue; in nix_recv_pkts()
114 nix_recv_pkts_vector(void *rx_queue, struct rte_mbuf **rx_pkts, in nix_recv_pkts_vector() argument
117 struct otx2_eth_rxq *rxq = rx_queue; uint16_t packets = 0; in nix_recv_pkts_vector()
311 packets += nix_recv_pkts(rx_queue, &rx_pkts[packets], in nix_recv_pkts_vector()
320 nix_recv_pkts_vector(void *rx_queue, struct rte_mbuf **rx_pkts, in nix_recv_pkts_vector() argument
323 RTE_SET_USED(rx_queue); in nix_recv_pkts_vector()
335 otx2_nix_recv_pkts_ ## name(void *rx_queue, \
342 otx2_nix_recv_pkts_mseg_ ## name(void *rx_queue, \
345 return nix_recv_pkts(rx_queue, rx_pkts, pkts, \
[all …]
/f-stack/dpdk/drivers/net/ice/
ice_rxtx.h
214 uint16_t ice_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
233 int ice_rx_descriptor_status(void *rx_queue, uint16_t offset);
244 uint16_t ice_recv_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts,
246 uint16_t ice_recv_scattered_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts,
250 uint16_t ice_recv_pkts_vec_avx2(void *rx_queue, struct rte_mbuf **rx_pkts,
252 uint16_t ice_recv_scattered_pkts_vec_avx2(void *rx_queue,
257 uint16_t ice_recv_pkts_vec_avx512(void *rx_queue, struct rte_mbuf **rx_pkts,
259 uint16_t ice_recv_scattered_pkts_vec_avx512(void *rx_queue,
/f-stack/dpdk/lib/librte_eventdev/
rte_event_eth_rx_adapter.c
289 return dev_info->rx_queue && in rxa_intr_queue()
302 dev_info->rx_queue && in rxa_polled_queue()
585 &dev_info->rx_queue[q]; in rxa_calc_wrr_sequence()
1773 rx_queue = dev_info->rx_queue; in rxa_sw_add()
1777 dev_info->rx_queue = in rxa_sw_add()
1862 if (rx_queue == NULL) { in rxa_sw_add()
1863 rte_free(dev_info->rx_queue); in rxa_sw_add()
1864 dev_info->rx_queue = NULL; in rxa_sw_add()
2105 dev_info->rx_queue = in rte_event_eth_rx_adapter_queue_add()
2198 dev_info->rx_queue = NULL; in rte_event_eth_rx_adapter_queue_del()
[all …]
/f-stack/dpdk/drivers/net/axgbe/
axgbe_rxtx.h
180 uint16_t axgbe_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
182 uint16_t eth_axgbe_recv_scattered_pkts(void *rx_queue,
184 uint16_t axgbe_recv_pkts_threshold_refresh(void *rx_queue,
188 int axgbe_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
axgbe_rxtx.c
16 axgbe_rx_queue_release(struct axgbe_rx_queue *rx_queue) in axgbe_rx_queue_release() argument
21 if (rx_queue) { in axgbe_rx_queue_release()
22 sw_ring = rx_queue->sw_ring; in axgbe_rx_queue_release()
24 for (i = 0; i < rx_queue->nb_desc; i++) { in axgbe_rx_queue_release()
30 rte_free(rx_queue); in axgbe_rx_queue_release()
203 axgbe_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts, in axgbe_recv_pkts() argument
208 struct axgbe_rx_queue *rxq = rx_queue; in axgbe_recv_pkts()
316 uint16_t eth_axgbe_recv_scattered_pkts(void *rx_queue, in eth_axgbe_recv_scattered_pkts() argument
321 struct axgbe_rx_queue *rxq = rx_queue; in eth_axgbe_recv_scattered_pkts()
834 axgbe_dev_rx_descriptor_status(void *rx_queue, uint16_t offset) in axgbe_dev_rx_descriptor_status() argument
[all …]
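
axgbe_rx_queue_release (line 16 above) walks the software ring, frees any mbuf still attached to a descriptor slot, then frees the ring and the queue itself. The general shape of such a teardown, with an assumed minimal queue layout:

    #include <rte_malloc.h>
    #include <rte_mbuf.h>

    /* Hypothetical minimal RX queue; real PMDs carry many more fields. */
    struct demo_rx_queue {
        struct rte_mbuf **sw_ring;  /* one mbuf slot per hardware descriptor */
        uint16_t nb_desc;
    };

    static void demo_rx_queue_release(struct demo_rx_queue *rx_queue)
    {
        uint16_t i;

        if (rx_queue == NULL)
            return;

        if (rx_queue->sw_ring != NULL) {
            for (i = 0; i < rx_queue->nb_desc; i++)
                rte_pktmbuf_free(rx_queue->sw_ring[i]);  /* NULL-safe */
            rte_free(rx_queue->sw_ring);
        }
        rte_free(rx_queue);
    }
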
/f-stack/dpdk/drivers/common/qat/
qat_qp.c
848 struct qat_queue *rx_queue; in qat_dequeue_op_burst() local
855 rx_queue = &(tmp_qp->rx_q); in qat_dequeue_op_burst()
856 head = rx_queue->head; in qat_dequeue_op_burst()
857 resp_msg = (uint8_t *)rx_queue->base_addr + rx_queue->head; in qat_dequeue_op_burst()
869 tmp_qp->op_cookies[head >> rx_queue->trailz], in qat_dequeue_op_burst()
874 tmp_qp->op_cookies[head >> rx_queue->trailz]); in qat_dequeue_op_burst()
877 head = adf_modulo(head + rx_queue->msg_size, in qat_dequeue_op_burst()
878 rx_queue->modulo_mask); in qat_dequeue_op_burst()
898 rx_queue->nb_processed_responses++; in qat_dequeue_op_burst()
904 rx_queue->head = head; in qat_dequeue_op_burst()
[all …]
/f-stack/dpdk/drivers/net/fm10k/
fm10k.h
320 uint16_t fm10k_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
323 uint16_t fm10k_recv_scattered_pkts(void *rx_queue,
330 fm10k_dev_rx_descriptor_done(void *rx_queue, uint16_t offset);
333 fm10k_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
336 fm10k_dev_tx_descriptor_status(void *rx_queue, uint16_t offset);
/f-stack/dpdk/drivers/bus/fslmc/portal/
dpaa2_hw_dpci.c
84 rxq = &(dpci_node->rx_queue[i]); in rte_dpaa2_create_dpci_device()
121 dpci_node->rx_queue[i].fqid = rx_attr.fqid; in rte_dpaa2_create_dpci_device()
144 struct dpaa2_queue *rxq = &(dpci_node->rx_queue[i]); in rte_dpaa2_create_dpci_device()
/f-stack/dpdk/drivers/net/octeontx/
octeontx_rxtx.c
23 octeontx_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts, uint16_t nb_pkts) in octeontx_recv_pkts() argument
30 rxq = rx_queue; in octeontx_recv_pkts()
/f-stack/dpdk/drivers/net/igc/
igc_txrx.h
28 int eth_igc_rx_descriptor_done(void *rx_queue, uint16_t offset);
30 int eth_igc_rx_descriptor_status(void *rx_queue, uint16_t offset);
/f-stack/dpdk/drivers/net/tap/
rte_eth_tap.h
45 struct rx_queue { struct
90 struct rx_queue rxq[RTE_PMD_TAP_MAX_QUEUES]; /* List of RX queues */
/f-stack/dpdk/drivers/net/e1000/
e1000_ethdev.h
405 int eth_igb_rx_descriptor_done(void *rx_queue, uint16_t offset);
407 int eth_igb_rx_descriptor_status(void *rx_queue, uint16_t offset);
482 int eth_em_rx_descriptor_done(void *rx_queue, uint16_t offset);
484 int eth_em_rx_descriptor_status(void *rx_queue, uint16_t offset);
504 uint16_t eth_em_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
507 uint16_t eth_em_recv_scattered_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
/f-stack/dpdk/drivers/net/i40e/
i40e_rxtx.h
198 uint16_t i40e_recv_pkts(void *rx_queue,
201 uint16_t i40e_recv_scattered_pkts(void *rx_queue,
224 int i40e_dev_rx_descriptor_done(void *rx_queue, uint16_t offset);
225 int i40e_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
228 uint16_t i40e_recv_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts,
230 uint16_t i40e_recv_scattered_pkts_vec(void *rx_queue,
245 uint16_t i40e_recv_pkts_vec_avx2(void *rx_queue, struct rte_mbuf **rx_pkts,
247 uint16_t i40e_recv_scattered_pkts_vec_avx2(void *rx_queue,
i40e_rxtx_vec_altivec.c
455 i40e_recv_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts, in i40e_recv_pkts_vec() argument
458 return _recv_raw_pkts_vec(rx_queue, rx_pkts, nb_pkts, NULL); in i40e_recv_pkts_vec()
468 i40e_recv_scattered_burst_vec(void *rx_queue, struct rte_mbuf **rx_pkts, in i40e_recv_scattered_burst_vec() argument
471 struct i40e_rx_queue *rxq = rx_queue; in i40e_recv_scattered_burst_vec()
506 i40e_recv_scattered_pkts_vec(void *rx_queue, struct rte_mbuf **rx_pkts, in i40e_recv_scattered_pkts_vec() argument
514 burst = i40e_recv_scattered_burst_vec(rx_queue, in i40e_recv_scattered_pkts_vec()
523 return retval + i40e_recv_scattered_burst_vec(rx_queue, in i40e_recv_scattered_pkts_vec()
/f-stack/dpdk/drivers/net/txgbe/
txgbe_ethdev.h
225 int txgbe_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
253 uint16_t txgbe_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
256 uint16_t txgbe_recv_pkts_bulk_alloc(void *rx_queue, struct rte_mbuf **rx_pkts,
259 uint16_t txgbe_recv_pkts_lro_single_alloc(void *rx_queue,
261 uint16_t txgbe_recv_pkts_lro_bulk_alloc(void *rx_queue,