/f-stack/dpdk/drivers/net/virtio/

  virtio_ethdev.h
    81, 83, 86, 95, 98, 100, 103, 106, 109, 112: uint16_t nb_pkts);
    [all …]
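Every hit above is the tail of an RX/TX burst prototype; all DPDK poll-mode drivers, virtio included, expose handlers with this shape. A minimal sketch of the shape, assuming nothing virtio-specific (the queue struct and function name are placeholders):

    #include <stdint.h>
    #include <rte_mbuf.h>

    struct my_rx_queue;   /* driver-private queue state (placeholder) */

    /* An RX burst handler receives a queue, an array to fill and a requested
     * count, and returns how many mbufs it actually stored; the return value
     * never exceeds nb_pkts. TX handlers mirror this with tx_pkts. */
    uint16_t
    my_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts, uint16_t nb_pkts)
    {
        struct my_rx_queue *rxq = rx_queue;
        uint16_t nb_rx = 0;

        (void)rxq;
        (void)rx_pkts;
        /* ... pull up to nb_pkts descriptors and store their mbufs into
         * rx_pkts[0..nb_rx-1] ... */
        return nb_rx;
    }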

/f-stack/dpdk/drivers/net/nfb/

  nfb_rx.h  (all hits in nfb_eth_ndp_rx)
    142: uint16_t nb_pkts)
    154: struct ndp_packet packets[nb_pkts];
    156: struct rte_mbuf *mbufs[nb_pkts];
    158: if (unlikely(ndp->queue == NULL || nb_pkts == 0)) {
    166: i = rte_pktmbuf_alloc_bulk(ndp->mb_pool, mbufs, nb_pkts);
    170: num_rx = ndp_rx_burst_get(ndp->queue, packets, nb_pkts);
    172: if (unlikely(num_rx != nb_pkts)) {
    173: for (i = num_rx; i < nb_pkts; i++)
    177: nb_pkts = num_rx;
    185: for (i = 0; i < nb_pkts; ++i) {
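The nfb receive path above pre-allocates a full burst of mbufs, asks the hardware for up to nb_pkts frames, and returns the surplus mbufs to the pool when fewer arrive. A sketch of that allocate-then-trim pattern, assuming a placeholder hw_fill_packets() in place of the nfb-specific ndp_rx_burst_get():

    #include <stdint.h>
    #include <rte_branch_prediction.h>
    #include <rte_mbuf.h>
    #include <rte_mempool.h>

    /* Stand-in for the driver-specific receive call; here it pretends the
     * hardware had nothing to deliver. */
    static uint16_t
    hw_fill_packets(struct rte_mbuf **mbufs, uint16_t nb_pkts)
    {
        (void)mbufs;
        (void)nb_pkts;
        return 0;
    }

    uint16_t
    rx_burst_prealloc(struct rte_mempool *mp, struct rte_mbuf **rx_pkts,
                      uint16_t nb_pkts)
    {
        uint16_t num_rx, i;

        if (unlikely(nb_pkts == 0))
            return 0;

        struct rte_mbuf *mbufs[nb_pkts];   /* sized by the burst, as in nfb */

        /* Grab all candidate mbufs up front; give up if the pool is short. */
        if (rte_pktmbuf_alloc_bulk(mp, mbufs, nb_pkts) != 0)
            return 0;

        num_rx = hw_fill_packets(mbufs, nb_pkts);

        /* The hardware delivered fewer frames: hand back the unused mbufs. */
        if (unlikely(num_rx != nb_pkts)) {
            for (i = num_rx; i < nb_pkts; i++)
                rte_pktmbuf_free(mbufs[i]);
            nb_pkts = num_rx;
        }

        for (i = 0; i < nb_pkts; i++)
            rx_pkts[i] = mbufs[i];

        return nb_pkts;
    }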

  nfb_tx.h  (all hits in nfb_eth_ndp_tx)
    123: uint16_t nb_pkts)
    135: struct ndp_packet packets[nb_pkts];
    137: if (unlikely(ndp->queue == NULL || nb_pkts == 0)) {
    142: for (i = 0; i < nb_pkts; i++) {
    147: num_tx = ndp_tx_burst_get(ndp->queue, packets, nb_pkts);
    149: if (unlikely(num_tx != nb_pkts))
    152: for (i = 0; i < nb_pkts; ++i) {
    189: ndp->err_pkts += nb_pkts - num_tx;

/f-stack/dpdk/drivers/net/bnxt/

  bnxt_rxtx_vec_sse.c
    bnxt_recv_pkts_vec():
      143: uint16_t nb_pkts)
      169: nb_pkts = RTE_MIN(nb_pkts, RTE_BNXT_MAX_RX_BURST);
      179: nb_pkts = RTE_MIN(nb_pkts, RTE_MIN(rx_ring_size - mbcons,
      190: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_BNXT_DESCS_PER_LOOP);
    bnxt_xmit_fixed_burst_vec():
      351: uint16_t nb_pkts)
      366: nb_pkts = RTE_MIN(nb_pkts, bnxt_tx_avail(txq));
      372: to_send = nb_pkts;
      403: return nb_pkts;
    bnxt_xmit_pkts_vec():
      408: uint16_t nb_pkts)
      425: while (nb_pkts) {
    [all …]
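Before its SIMD loop, bnxt_recv_pkts_vec() clamps the request three ways: to the PMD's maximum burst, to the descriptors left before the ring index wraps, and down to a whole number of per-loop groups. A sketch of that clamping arithmetic, with illustrative stand-ins for RTE_BNXT_MAX_RX_BURST and RTE_BNXT_DESCS_PER_LOOP:

    #include <stdint.h>
    #include <rte_common.h>

    #define MAX_RX_BURST    32U   /* stands in for RTE_BNXT_MAX_RX_BURST */
    #define DESCS_PER_LOOP   4U   /* stands in for RTE_BNXT_DESCS_PER_LOOP
                                   * (must be a power of two) */

    uint16_t
    clamp_rx_burst(uint16_t nb_pkts, uint16_t ring_size, uint16_t cons)
    {
        /* Never ask for more than the PMD handles in one call. */
        nb_pkts = RTE_MIN(nb_pkts, MAX_RX_BURST);

        /* Stop at the end of the ring so the vector loads never wrap. */
        nb_pkts = RTE_MIN(nb_pkts, (uint16_t)(ring_size - cons));

        /* The inner loop consumes DESCS_PER_LOOP descriptors per iteration,
         * so round the request down to a whole number of iterations. */
        nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, DESCS_PER_LOOP);

        return nb_pkts;
    }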

  bnxt_rxtx_vec_neon.c
    bnxt_recv_pkts_vec():
      151: uint16_t nb_pkts)
      177: nb_pkts = RTE_MIN(nb_pkts, RTE_BNXT_MAX_RX_BURST);
      187: nb_pkts = RTE_MIN(nb_pkts, RTE_MIN(rx_ring_size - mbcons,
      198: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_BNXT_DESCS_PER_LOOP);
    bnxt_xmit_fixed_burst_vec():
      352: uint16_t nb_pkts)
      362: nb_pkts = RTE_MIN(nb_pkts, bnxt_tx_avail(txq));
      368: to_send = nb_pkts;
      397: return nb_pkts;
    bnxt_xmit_pkts_vec():
      402: uint16_t nb_pkts)
      417: while (nb_pkts) {
    [all …]

/f-stack/dpdk/drivers/net/hns3/

  hns3_rxtx_vec_sve.c
    hns3_recv_burst_vec_sve():
      78: uint16_t nb_pkts,
    hns3_recv_pkts_vec_sve():
      286: uint16_t nb_pkts)
      295: nb_pkts = RTE_MIN(nb_pkts, HNS3_DEFAULT_RX_BURST);
      296: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, HNS3_SVE_DEFAULT_DESCS_PER_LOOP);
    hns3_tx_fill_hw_ring_sve():
      358: uint16_t nb_pkts)
    hns3_xmit_fixed_burst_vec_sve():
      423: uint16_t nb_pkts)
      431: nb_pkts = RTE_MIN(txq->tx_bd_ready, nb_pkts);
      449: return nb_pkts;
    hns3_xmit_pkts_vec_sve():
      455: uint16_t nb_pkts)
      461: while (nb_pkts) {
    [all …]

  hns3_rxtx_vec.c
    hns3_xmit_pkts_vec():
      29: hns3_xmit_pkts_vec(void *tx_queue, struct rte_mbuf **tx_pkts, uint16_t nb_pkts)
      34: while (nb_pkts) {
      37: new_burst = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      41: nb_pkts -= ret;
    hns3_recv_pkts_vec():
      103: uint16_t nb_pkts)
      110: nb_pkts = RTE_MIN(nb_pkts, HNS3_DEFAULT_RX_BURST);
      111: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, HNS3_DEFAULT_DESCS_PER_LOOP);
      128: nb_rx = hns3_recv_burst_vec(rxq, rx_pkts, nb_pkts, &bd_err_mask);
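hns3_xmit_pkts_vec() is the usual thin TX wrapper: it chops an arbitrarily large burst into chunks of at most txq->tx_rs_thresh packets and stops as soon as the fixed-burst helper cannot take a full chunk. A sketch of that wrapper, assuming a placeholder xmit_fixed_burst() for the driver's fixed-burst helper:

    #include <stdint.h>
    #include <rte_common.h>
    #include <rte_mbuf.h>

    /* Stand-in for the driver's fixed-burst helper (hns3_xmit_fixed_burst_vec()
     * in the listing above); here it pretends everything was accepted. */
    static uint16_t
    xmit_fixed_burst(void *txq, struct rte_mbuf **pkts, uint16_t nb_pkts)
    {
        (void)txq;
        (void)pkts;
        return nb_pkts;
    }

    uint16_t
    xmit_pkts_wrapper(void *txq, struct rte_mbuf **tx_pkts, uint16_t nb_pkts,
                      uint16_t tx_rs_thresh)
    {
        uint16_t nb_tx = 0;

        while (nb_pkts) {
            uint16_t ret, new_burst;

            new_burst = RTE_MIN(nb_pkts, tx_rs_thresh);
            ret = xmit_fixed_burst(txq, &tx_pkts[nb_tx], new_burst);
            nb_tx += ret;
            nb_pkts -= ret;
            /* A short return means the ring is full: stop early. */
            if (ret < new_burst)
                break;
        }
        return nb_tx;
    }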

/f-stack/dpdk/drivers/net/axgbe/

  axgbe_rxtx_vec_sse.c  (all hits in axgbe_xmit_pkts_vec)
    60: uint16_t nb_pkts)
    74: nb_pkts = RTE_MIN(txq->nb_desc_free, nb_pkts);
    75: nb_commit = nb_pkts;
    90: txq->cur += nb_pkts;
    95: txq->pkts += nb_pkts;
    96: txq->nb_desc_free -= nb_pkts;
    98: return nb_pkts;

/f-stack/dpdk/drivers/net/iavf/

  iavf_rxtx.h
    418, 421, 424, 440, 450, 452, 455, 463, 465, 471: uint16_t nb_pkts);
    [all …]

  iavf_rxtx_vec_sse.c
    _recv_raw_pkts_vec():
      419: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, IAVF_VPMD_DESCS_PER_LOOP);
    _recv_raw_pkts_vec_flex_rxd():
      694: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, IAVF_VPMD_DESCS_PER_LOOP);
    iavf_recv_pkts_vec():
      930: uint16_t nb_pkts)
    iavf_recv_scattered_pkts_vec():
      1004: nb_pkts -= burst;
      1011: nb_pkts);
    iavf_recv_scattered_pkts_vec_flex_rxd():
      1074: nb_pkts -= burst;
    iavf_xmit_fixed_burst_vec():
      1121: nb_pkts = RTE_MIN(nb_pkts, txq->rs_thresh);
      1126: nb_pkts = (uint16_t)RTE_MIN(txq->nb_free, nb_pkts);
      1176: return nb_pkts;
    iavf_xmit_pkts_vec():
      1186: while (nb_pkts) {
    [all …]

  iavf_rxtx_vec_avx2.c
    _iavf_recv_raw_pkts_vec_avx2():
      157: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, IAVF_DESCS_PER_LOOP_AVX);
    _iavf_recv_raw_pkts_vec_avx2_flex_rxd():
      654: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, IAVF_DESCS_PER_LOOP_AVX);
    iavf_recv_scattered_pkts_vec_avx2():
      1307: nb_pkts -= burst;
    iavf_recv_scattered_pkts_vec_avx2_flex_rxd():
      1376: nb_pkts -= burst;
    iavf_vtx():
      1412: for (; nb_pkts > 3; txdp += 4, pkt += 4, nb_pkts -= 4) {
      1447: while (nb_pkts) {
    iavf_xmit_fixed_burst_vec_avx2():
      1466: nb_pkts = RTE_MIN(nb_pkts, txq->rs_thresh);
      1471: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_free, nb_pkts);
      1518: return nb_pkts;
    iavf_xmit_pkts_vec_avx2():
      1528: while (nb_pkts) {
    [all …]

/f-stack/dpdk/lib/librte_bpf/

  bpf_pkt.c
    bpf_rx_callback_vm():
      276: struct rte_mbuf *pkt[], uint16_t nb_pkts,
      287: nb_pkts;
    bpf_rx_callback_jit():
      294: struct rte_mbuf *pkt[], uint16_t nb_pkts,
      304: nb_pkts;
    bpf_tx_callback_vm():
      320: nb_pkts;
    bpf_tx_callback_jit():
      336: nb_pkts;
    bpf_rx_callback_mb_vm():
      347: struct rte_mbuf *pkt[], uint16_t nb_pkts,
      357: nb_pkts;
    bpf_rx_callback_mb_jit():
      374: nb_pkts;
    bpf_tx_callback_mb_vm():
      390: nb_pkts;
    [all …]
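These librte_bpf hooks are ethdev RX/TX callbacks that run a filter program over each burst and return how many packets survive. A sketch of a burst-filtering RX callback in that shape, with a placeholder keep_pkt() predicate standing in for the BPF program; the registration shown in the trailing comment assumes port 0, queue 0:

    #include <stdint.h>
    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    /* Placeholder predicate: the real code runs the loaded BPF program here. */
    static int
    keep_pkt(const struct rte_mbuf *m)
    {
        (void)m;
        return 1;
    }

    uint16_t
    filter_rx_callback(uint16_t port, uint16_t queue, struct rte_mbuf *pkt[],
                       uint16_t nb_pkts, uint16_t max_pkts, void *user_param)
    {
        uint16_t i, nb_keep = 0;

        (void)port;
        (void)queue;
        (void)max_pkts;
        (void)user_param;

        /* Compact the surviving packets to the front, free the rest. */
        for (i = 0; i < nb_pkts; i++) {
            if (keep_pkt(pkt[i]))
                pkt[nb_keep++] = pkt[i];
            else
                rte_pktmbuf_free(pkt[i]);
        }
        return nb_keep;   /* the application only sees the kept packets */
    }

    /* Installed with, e.g.:
     *   rte_eth_add_rx_callback(0, 0, filter_rx_callback, NULL);
     */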

/f-stack/dpdk/drivers/net/failsafe/

  failsafe_rxtx.c
    failsafe_rx_set_port():
      71: failsafe_rx_set_port(struct rte_mbuf **rx_pkts, uint16_t nb_pkts, uint16_t port)
      75: for (i = 0; i != nb_pkts; ++i)
    failsafe_rx_burst():
      82: uint16_t nb_pkts)
      100: rx_pkt_burst(sub_rxq, rx_pkts, nb_pkts);
    failsafe_rx_burst_fast():
      114: uint16_t nb_pkts)
      128: rx_pkt_burst(sub_rxq, rx_pkts, nb_pkts);
    failsafe_tx_burst():
      142: uint16_t nb_pkts)
      155: nb_tx = ETH(sdev)->tx_pkt_burst(sub_txq, tx_pkts, nb_pkts);
    failsafe_tx_burst_fast():
      163: uint16_t nb_pkts)
      175: nb_tx = ETH(sdev)->tx_pkt_burst(sub_txq, tx_pkts, nb_pkts);
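The fail-safe PMD hands each burst to whichever sub-device is currently active and then rewrites every mbuf's port field so the application sees the fail-safe port id rather than the sub-device's. A sketch of that pass-through-and-retag pattern, with sub_rx_burst() as a placeholder for the sub-device's rx_pkt_burst:

    #include <stdint.h>
    #include <rte_mbuf.h>

    /* Stand-in for the active sub-device's rx_pkt_burst. */
    static uint16_t
    sub_rx_burst(void *sub_rxq, struct rte_mbuf **rx_pkts, uint16_t nb_pkts)
    {
        (void)sub_rxq;
        (void)rx_pkts;
        (void)nb_pkts;
        return 0;
    }

    /* Make received mbufs report the fail-safe port, not the sub-device. */
    static void
    rx_set_port(struct rte_mbuf **rx_pkts, uint16_t nb_pkts, uint16_t port)
    {
        uint16_t i;

        for (i = 0; i != nb_pkts; i++)
            rx_pkts[i]->port = port;
    }

    uint16_t
    rx_burst_passthrough(void *sub_rxq, struct rte_mbuf **rx_pkts,
                         uint16_t nb_pkts, uint16_t fs_port_id)
    {
        uint16_t nb_rx = sub_rx_burst(sub_rxq, rx_pkts, nb_pkts);

        rx_set_port(rx_pkts, nb_rx, fs_port_id);
        return nb_rx;
    }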

/f-stack/dpdk/drivers/net/ice/

  ice_rxtx_vec_sse.c
    _ice_recv_raw_pkts_vec():
      319: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, ICE_DESCS_PER_LOOP);
    ice_recv_pkts_vec():
      556: uint16_t nb_pkts)
    ice_recv_scattered_pkts_vec():
      619: nb_pkts -= burst;
      626: nb_pkts);
    ice_xmit_fixed_burst_vec():
      655: uint16_t nb_pkts)
      666: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      671: nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      672: nb_commit = nb_pkts;
      718: return nb_pkts;
    ice_xmit_pkts_vec():
      728: while (nb_pkts) {
    [all …]

  ice_rxtx_vec_avx2.c
    _ice_recv_raw_pkts_vec_avx2():
      170: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, ICE_DESCS_PER_LOOP_AVX);
    ice_recv_scattered_burst_vec_avx2():
      776: uint16_t nb_pkts)
    ice_recv_scattered_pkts_vec_avx2():
      818: uint16_t nb_pkts)
      826: nb_pkts -= burst;
    ice_vtx():
      862: for (; nb_pkts > 3; txdp += 4, pkt += 4, nb_pkts -= 4) {
      897: while (nb_pkts) {
    ice_xmit_fixed_burst_vec_avx2():
      915: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      920: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      967: return nb_pkts;
    ice_xmit_pkts_vec_avx2():
      977: while (nb_pkts) {
    [all …]

  ice_rxtx.h
    215, 217, 220, 245, 247, 249, 251, 254, 256, 258: uint16_t nb_pkts);
    [all …]

  ice_rxtx_vec_avx512.c
    _ice_recv_raw_pkts_vec_avx512():
      164: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, ICE_DESCS_PER_LOOP_AVX);
    ice_recv_pkts_vec_avx512():
      751: uint16_t nb_pkts)
    ice_recv_scattered_pkts_vec_avx512():
      813: nb_pkts -= burst;
    ice_vtx():
      950: for (; nb_pkts > 3; txdp += 4, pkt += 4, nb_pkts -= 4) {
      982: while (nb_pkts) {
    ice_xmit_fixed_burst_vec_avx512():
      1000: uint16_t nb_pkts)
      1010: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      1015: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      1063: return nb_pkts;
    ice_xmit_pkts_vec_avx512():
      1073: while (nb_pkts) {
    [all …]

/f-stack/dpdk/drivers/net/i40e/

  i40e_rxtx_vec_altivec.c
    _recv_raw_pkts_vec():
      220: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_I40E_DESCS_PER_LOOP);
    i40e_recv_pkts_vec():
      456: uint16_t nb_pkts)
    i40e_recv_scattered_burst_vec():
      469: uint16_t nb_pkts)
    i40e_recv_scattered_pkts_vec():
      507: uint16_t nb_pkts)
      518: nb_pkts -= burst;
      525: nb_pkts);
    i40e_xmit_fixed_burst_vec():
      553: uint16_t nb_pkts)
      564: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      569: nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      570: nb_commit = nb_pkts;
    [all …]

  i40e_rxtx_vec_neon.c
    _recv_raw_pkts_vec():
      235: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_I40E_DESCS_PER_LOOP);
    i40e_recv_scattered_burst_vec():
      448: uint16_t nb_pkts)
    i40e_recv_scattered_pkts_vec():
      488: uint16_t nb_pkts)
      492: while (nb_pkts > RTE_I40E_VPMD_RX_BURST) {
      499: nb_pkts -= burst;
      506: nb_pkts);
    vtx():
      523: uint16_t nb_pkts, uint64_t flags)
    i40e_xmit_fixed_burst_vec():
      544: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      549: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      550: if (unlikely(nb_pkts == 0))
    [all …]

  i40e_rxtx_vec_avx2.c
    _recv_raw_pkts_vec_avx2():
      238: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_I40E_DESCS_PER_LOOP_AVX);
    i40e_recv_pkts_vec_avx2():
      738: uint16_t nb_pkts)
    i40e_recv_scattered_pkts_vec_avx2():
      799: nb_pkts -= burst;
    vtx():
      835: for (; nb_pkts > 3; txdp += 4, pkt += 4, nb_pkts -= 4) {
      856: while (nb_pkts) {
    i40e_xmit_fixed_burst_vec_avx2():
      864: uint16_t nb_pkts)
      874: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      879: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      926: return nb_pkts;
    i40e_xmit_pkts_vec_avx2():
      936: while (nb_pkts) {
    [all …]
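The vtx() helpers in these vector TX paths write four descriptors per iteration and fall back to one at a time for the tail. A sketch of that unroll-by-4 structure, using a simplified tx_desc struct in place of the real hardware descriptor and a scalar fill_one() in place of the SIMD store:

    #include <stdint.h>
    #include <rte_mbuf.h>

    /* Simplified placeholder for the hardware TX descriptor. */
    struct tx_desc {
        uint64_t addr;
        uint64_t cmd;
    };

    static void
    fill_one(struct tx_desc *txdp, struct rte_mbuf *pkt, uint64_t flags)
    {
        txdp->addr = rte_mbuf_data_iova(pkt);
        txdp->cmd = flags | (uint64_t)pkt->data_len;
    }

    void
    vtx_sketch(struct tx_desc *txdp, struct rte_mbuf **pkt, uint16_t nb_pkts,
               uint64_t flags)
    {
        /* Main loop: four descriptors per iteration. */
        for (; nb_pkts > 3; txdp += 4, pkt += 4, nb_pkts -= 4) {
            fill_one(&txdp[0], pkt[0], flags);
            fill_one(&txdp[1], pkt[1], flags);
            fill_one(&txdp[2], pkt[2], flags);
            fill_one(&txdp[3], pkt[3], flags);
        }
        /* Scalar tail: whatever is left, one at a time. */
        while (nb_pkts) {
            fill_one(txdp, *pkt, flags);
            txdp++;
            pkt++;
            nb_pkts--;
        }
    }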

/f-stack/dpdk/drivers/net/thunderx/

  nicvf_rxtx.c
    nicvf_free_xmitted_buffers():
      129: uint16_t nb_pkts)
    nicvf_xmit_pkts_multiseg():
      186: uint16_t nb_pkts)
      203: for (i = 0; i < nb_pkts; i++) {
    nicvf_rx_pkts_to_process():
      399: if (unlikely(available_space < nb_pkts))
    nicvf_recv_pkts_no_offload():
      489: uint16_t nb_pkts)
    nicvf_recv_pkts_cksum():
      497: uint16_t nb_pkts)
    nicvf_recv_pkts_vlan_strip():
      505: uint16_t nb_pkts)
    nicvf_recv_pkts_cksum_vlan_strip():
      513: uint16_t nb_pkts)
    nicvf_recv_pkts_multiseg_no_offload():
      621: uint16_t nb_pkts)
    nicvf_recv_pkts_multiseg_cksum():
      629: uint16_t nb_pkts)
    [all …]
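nicvf exposes one receive entry point per offload combination (plain, checksum, VLAN strip, both), all with the same nb_pkts signature, so a single function pointer can be installed as the device's rx_pkt_burst at configure time and the datapath carries no per-packet offload branches. A sketch of that one-time selection; the handler stubs and flag names are illustrative:

    #include <stdbool.h>
    #include <stdint.h>
    #include <rte_mbuf.h>

    typedef uint16_t (*rx_burst_fn)(void *rxq, struct rte_mbuf **pkts,
                                    uint16_t nb_pkts);

    /* Illustrative handler stubs, one per offload combination. */
    static uint16_t rx_plain(void *q, struct rte_mbuf **p, uint16_t n)
    { (void)q; (void)p; (void)n; return 0; }
    static uint16_t rx_cksum(void *q, struct rte_mbuf **p, uint16_t n)
    { (void)q; (void)p; (void)n; return 0; }
    static uint16_t rx_vlan_strip(void *q, struct rte_mbuf **p, uint16_t n)
    { (void)q; (void)p; (void)n; return 0; }
    static uint16_t rx_cksum_vlan_strip(void *q, struct rte_mbuf **p, uint16_t n)
    { (void)q; (void)p; (void)n; return 0; }

    /* Resolve the burst function once, at configure time. */
    rx_burst_fn
    select_rx_burst(bool want_cksum, bool want_vlan_strip)
    {
        if (want_cksum && want_vlan_strip)
            return rx_cksum_vlan_strip;
        if (want_cksum)
            return rx_cksum;
        if (want_vlan_strip)
            return rx_vlan_strip;
        return rx_plain;
    }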

/f-stack/dpdk/examples/ipsec-secgw/

  sad.h  (all hits in sad_lookup)
    62: void *sa[], uint16_t nb_pkts)
    69: struct rte_ipsec_sadv4_key v4[nb_pkts];
    70: struct rte_ipsec_sadv6_key v6[nb_pkts];
    71: int v4_idxes[nb_pkts];
    72: int v6_idxes[nb_pkts];
    73: const union rte_ipsec_sad_key *keys_v4[nb_pkts];
    74: const union rte_ipsec_sad_key *keys_v6[nb_pkts];
    75: void *v4_res[nb_pkts];
    76: void *v6_res[nb_pkts];
    85: for (i = 0; i < nb_pkts; i++) {
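sad_lookup() sizes its scratch arrays with nb_pkts and splits the burst into IPv4 and IPv6 entries so each address family needs only one bulk SAD lookup, with index arrays recording where each result belongs in the output. A sketch of that partitioning step, assuming a simplified is_ipv4() classifier (untagged frames) and NULL placeholders where the real lookup results would be scattered back:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <rte_byteorder.h>
    #include <rte_ether.h>
    #include <rte_mbuf.h>

    /* Simplified classifier: checks the Ethernet type of an untagged frame. */
    static bool
    is_ipv4(const struct rte_mbuf *m)
    {
        const struct rte_ether_hdr *eth =
            rte_pktmbuf_mtod(m, const struct rte_ether_hdr *);

        return eth->ether_type == rte_cpu_to_be_16(RTE_ETHER_TYPE_IPV4);
    }

    void
    partition_burst(struct rte_mbuf *pkts[], void *sa[], uint16_t nb_pkts)
    {
        if (nb_pkts == 0)
            return;

        int v4_idxes[nb_pkts], v6_idxes[nb_pkts];   /* sized by the burst */
        uint16_t i, n4 = 0, n6 = 0;

        for (i = 0; i < nb_pkts; i++) {
            if (is_ipv4(pkts[i]))
                v4_idxes[n4++] = i;   /* remember the original slot */
            else
                v6_idxes[n6++] = i;
        }

        /* One bulk lookup per family would run here; results are scattered
         * back into sa[] through the index arrays so the output order still
         * matches the input burst. NULL stands in for the lookup results. */
        for (i = 0; i < n4; i++)
            sa[v4_idxes[i]] = NULL;
        for (i = 0; i < n6; i++)
            sa[v6_idxes[i]] = NULL;
    }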

/f-stack/dpdk/app/test-pmd/

  util.c
    dump_pkt_burst():
      61: uint16_t nb_pkts, int is_rx)
      78: if (!nb_pkts)
      83: (unsigned int) nb_pkts);
      84: for (i = 0; i < nb_pkts; i++) {
    dump_rx_pkts():
      242: return nb_pkts;
    dump_tx_pkts():
      250: return nb_pkts;
    tx_pkt_set_md():
      265: for (i = 0; i < nb_pkts; i++) {
      270: return nb_pkts;
    tx_pkt_set_dynf():
      318: struct rte_mbuf *pkts[], uint16_t nb_pkts,
      324: for (i = 0; i < nb_pkts; i++)
    [all …]
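testpmd's dump callbacks are purely observational: dump_rx_pkts()/dump_tx_pkts() print the burst and return nb_pkts unchanged so the datapath is unaffected. A sketch of such a callback pair; note that TX callbacks lack the max_pkts parameter RX callbacks have. Port 0 and queue 0 in the registration comment are assumptions:

    #include <stdint.h>
    #include <stdio.h>
    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    uint16_t
    dump_rx_cb(uint16_t port, uint16_t queue, struct rte_mbuf *pkts[],
               uint16_t nb_pkts, uint16_t max_pkts, void *user_param)
    {
        uint16_t i;

        (void)max_pkts;
        (void)user_param;
        if (!nb_pkts)
            return 0;
        printf("port %u queue %u: received %u packets\n",
               port, queue, (unsigned int)nb_pkts);
        for (i = 0; i < nb_pkts; i++)
            printf("  pkt %u: %u bytes\n", i, (unsigned int)pkts[i]->pkt_len);
        return nb_pkts;   /* pass the burst through untouched */
    }

    uint16_t
    dump_tx_cb(uint16_t port, uint16_t queue, struct rte_mbuf *pkts[],
               uint16_t nb_pkts, void *user_param)
    {
        (void)port;
        (void)queue;
        (void)pkts;
        (void)user_param;
        /* TX callbacks have no max_pkts; otherwise the contract is the same:
         * return how many packets should proceed to the driver. */
        return nb_pkts;
    }

    /* Installed with, e.g.:
     *   rte_eth_add_rx_callback(0, 0, dump_rx_cb, NULL);
     *   rte_eth_add_tx_callback(0, 0, dump_tx_cb, NULL);
     */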

/f-stack/dpdk/drivers/net/ixgbe/

  ixgbe_rxtx_vec_neon.c
    _recv_raw_pkts_vec():
      226: nb_pkts = RTE_ALIGN_FLOOR(nb_pkts, RTE_IXGBE_DESCS_PER_LOOP);
    ixgbe_recv_pkts_vec():
      388: uint16_t nb_pkts)
    ixgbe_recv_scattered_burst_vec():
      403: uint16_t nb_pkts)
    ixgbe_recv_scattered_pkts_vec():
      440: uint16_t nb_pkts)
      451: nb_pkts -= burst;
      458: nb_pkts);
    ixgbe_xmit_fixed_burst_vec():
      484: uint16_t nb_pkts)
      495: nb_pkts = RTE_MIN(nb_pkts, txq->tx_rs_thresh);
      500: nb_commit = nb_pkts = (uint16_t)RTE_MIN(txq->nb_tx_free, nb_pkts);
      501: if (unlikely(nb_pkts == 0))
    [all …]

/f-stack/dpdk/lib/librte_gro/

  rte_gro.c
    rte_gro_reassemble_burst():
      143: uint16_t nb_pkts,
      179: return nb_pkts;
      239: for (i = 0; i < nb_pkts; i++) {
      282: if ((nb_after_gro < nb_pkts)
      288: 0, pkts, nb_pkts);
      293: 0, &pkts[i], nb_pkts - i);
      299: &pkts[i], nb_pkts - i);
      304: &pkts[i], nb_pkts - i);
    rte_gro_reassemble():
      320: uint16_t nb_pkts,
      334: return nb_pkts;
    [all …]
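rte_gro_reassemble_burst() merges a received burst in place and returns the (possibly smaller) packet count, with the merged packets compacted to the front of the array. A usage sketch on the RX path; the burst size, GRO type, and flow limits are illustrative values, not library defaults:

    #include <stdint.h>
    #include <rte_ethdev.h>
    #include <rte_gro.h>
    #include <rte_mbuf.h>

    #define BURST_SIZE 32

    uint16_t
    rx_and_gro(uint16_t port_id, uint16_t queue_id)
    {
        struct rte_mbuf *pkts[BURST_SIZE];
        struct rte_gro_param param = {
            .gro_types = RTE_GRO_TCP_IPV4,     /* merge TCP/IPv4 flows only */
            .max_flow_num = BURST_SIZE,
            .max_item_per_flow = BURST_SIZE,
        };
        uint16_t nb_rx, nb_after_gro;

        nb_rx = rte_eth_rx_burst(port_id, queue_id, pkts, BURST_SIZE);
        if (nb_rx == 0)
            return 0;

        /* Merged packets are compacted to the front of pkts[]. */
        nb_after_gro = rte_gro_reassemble_burst(pkts, nb_rx, &param);

        /* ... process or forward pkts[0..nb_after_gro-1] ... */
        return nb_after_gro;
    }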