
Searched refs:nb_tx (Results 1 – 25 of 73) sorted by relevance

/f-stack/dpdk/app/test-pmd/
noisy_vnf.c
104 &pkts[nb_tx], nb_rx - nb_tx); in do_retry()
107 return nb_tx; in do_retry()
113 if (nb_tx < nb_rx) { in drop_pkts()
115 rte_pktmbuf_free(pkts[nb_tx]); in drop_pkts()
116 } while (++nb_tx < nb_rx); in drop_pkts()
119 return nb_rx - nb_tx; in drop_pkts()
148 uint16_t nb_tx = 0; in pkt_burst_noisy_vnf() local
167 nb_tx += do_retry(nb_rx, nb_tx, pkts_burst, fs); in pkt_burst_noisy_vnf()
169 fs->tx_packets += nb_tx; in pkt_burst_noisy_vnf()
191 nb_tx += do_retry(nb_rx, nb_tx, tmp_pkts, fs); in pkt_burst_noisy_vnf()
[all …]
iofwd.c
51 uint16_t nb_tx; in pkt_burst_io_forward() local
67 nb_tx = rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_io_forward()
72 if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) { in pkt_burst_io_forward()
76 nb_tx += rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_io_forward()
77 &pkts_burst[nb_tx], nb_rx - nb_tx); in pkt_burst_io_forward()
80 fs->tx_packets += nb_tx; in pkt_burst_io_forward()
81 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_io_forward()
82 if (unlikely(nb_tx < nb_rx)) { in pkt_burst_io_forward()
83 fs->fwd_dropped += (nb_rx - nb_tx); in pkt_burst_io_forward()
85 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_io_forward()
[all …]
macswap.c
58 uint16_t nb_tx; in pkt_burst_mac_swap() local
82 if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) { in pkt_burst_mac_swap()
84 while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) { in pkt_burst_mac_swap()
86 nb_tx += rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_mac_swap()
87 &pkts_burst[nb_tx], nb_rx - nb_tx); in pkt_burst_mac_swap()
90 fs->tx_packets += nb_tx; in pkt_burst_mac_swap()
91 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_mac_swap()
92 if (unlikely(nb_tx < nb_rx)) { in pkt_burst_mac_swap()
93 fs->fwd_dropped += (nb_rx - nb_tx); in pkt_burst_mac_swap()
95 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_mac_swap()
[all …]
macfwd.c
55 uint16_t nb_tx; in pkt_burst_mac_forward() local
102 if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) { in pkt_burst_mac_forward()
104 while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) { in pkt_burst_mac_forward()
106 nb_tx += rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_mac_forward()
107 &pkts_burst[nb_tx], nb_rx - nb_tx); in pkt_burst_mac_forward()
111 fs->tx_packets += nb_tx; in pkt_burst_mac_forward()
112 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_mac_forward()
113 if (unlikely(nb_tx < nb_rx)) { in pkt_burst_mac_forward()
114 fs->fwd_dropped += (nb_rx - nb_tx); in pkt_burst_mac_forward()
116 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_mac_forward()
[all …]
flowgen.c
95 uint16_t nb_tx; in pkt_burst_flow_gen() local
181 if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) { in pkt_burst_flow_gen()
183 while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) { in pkt_burst_flow_gen()
185 nb_tx += rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_flow_gen()
186 &pkts_burst[nb_tx], nb_rx - nb_tx); in pkt_burst_flow_gen()
189 fs->tx_packets += nb_tx; in pkt_burst_flow_gen()
191 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_flow_gen()
192 if (unlikely(nb_tx < nb_pkt)) { in pkt_burst_flow_gen()
194 next_flow -= (nb_pkt - nb_tx); in pkt_burst_flow_gen()
199 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_flow_gen()
[all …]
5tswap.c
94 uint16_t nb_tx; in pkt_burst_5tuple_swap() local
169 if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) { in pkt_burst_5tuple_swap()
171 while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) { in pkt_burst_5tuple_swap()
173 nb_tx += rte_eth_tx_burst(fs->tx_port, fs->tx_queue, in pkt_burst_5tuple_swap()
174 &pkts_burst[nb_tx], nb_rx - nb_tx); in pkt_burst_5tuple_swap()
177 fs->tx_packets += nb_tx; in pkt_burst_5tuple_swap()
178 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_5tuple_swap()
179 if (unlikely(nb_tx < nb_rx)) { in pkt_burst_5tuple_swap()
180 fs->fwd_dropped += (nb_rx - nb_tx); in pkt_burst_5tuple_swap()
182 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_5tuple_swap()
[all …]
icmpecho.c
287 uint16_t nb_tx; in reply_to_icmp_echo_rqsts() local
494 while (nb_tx < nb_replies && in reply_to_icmp_echo_rqsts()
497 nb_tx += rte_eth_tx_burst(fs->tx_port, in reply_to_icmp_echo_rqsts()
499 &pkts_burst[nb_tx], in reply_to_icmp_echo_rqsts()
500 nb_replies - nb_tx); in reply_to_icmp_echo_rqsts()
503 fs->tx_packets += nb_tx; in reply_to_icmp_echo_rqsts()
504 inc_tx_burst_stats(fs, nb_tx); in reply_to_icmp_echo_rqsts()
505 if (unlikely(nb_tx < nb_replies)) { in reply_to_icmp_echo_rqsts()
506 fs->fwd_dropped += (nb_replies - nb_tx); in reply_to_icmp_echo_rqsts()
508 rte_pktmbuf_free(pkts_burst[nb_tx]); in reply_to_icmp_echo_rqsts()
[all …]
txonly.c
328 uint16_t nb_tx; in pkt_burst_transmit() local
401 &pkts_burst[nb_tx], nb_pkt - nb_tx); in pkt_burst_transmit()
404 fs->tx_packets += nb_tx; in pkt_burst_transmit()
407 RTE_PER_LCORE(_ip_var) -= nb_pkt - nb_tx; in pkt_burst_transmit()
409 inc_tx_burst_stats(fs, nb_tx); in pkt_burst_transmit()
410 if (unlikely(nb_tx < nb_pkt)) { in pkt_burst_transmit()
415 (unsigned) nb_pkt, (unsigned) nb_tx, in pkt_burst_transmit()
416 (unsigned) (nb_pkt - nb_tx)); in pkt_burst_transmit()
417 fs->fwd_dropped += (nb_pkt - nb_tx); in pkt_burst_transmit()
419 rte_pktmbuf_free(pkts_burst[nb_tx]); in pkt_burst_transmit()
[all …]
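
The test-pmd hits above (iofwd.c, macswap.c, macfwd.c, flowgen.c, 5tswap.c, icmpecho.c, txonly.c) all revolve around the same TX pattern: send a burst, retry the unsent tail a bounded number of times when retry is enabled, then count and free whatever still could not be transmitted. Below is a minimal hedged sketch of that pattern, not testpmd's exact code; port, queue and the retry constants are illustrative placeholders, while rte_eth_tx_burst(), rte_delay_us() and rte_pktmbuf_free() are the real DPDK calls.

    #include <rte_ethdev.h>
    #include <rte_cycles.h>
    #include <rte_mbuf.h>

    #define BURST_TX_RETRY_NUM 64   /* illustrative retry bound */
    #define BURST_TX_WAIT_US    1   /* illustrative back-off */

    static uint16_t
    tx_burst_with_retry(uint16_t port, uint16_t queue,
                        struct rte_mbuf **pkts, uint16_t nb_rx)
    {
            uint32_t retry = 0;
            uint16_t nb_tx = rte_eth_tx_burst(port, queue, pkts, nb_rx);

            /* retry the unsent tail, as the fs->retry_enabled branches do */
            while (nb_tx < nb_rx && retry++ < BURST_TX_RETRY_NUM) {
                    rte_delay_us(BURST_TX_WAIT_US);
                    nb_tx += rte_eth_tx_burst(port, queue,
                                    &pkts[nb_tx], nb_rx - nb_tx);
            }
            /* whatever is still unsent is counted as dropped and freed */
            if (nb_tx < nb_rx) {
                    do {
                            rte_pktmbuf_free(pkts[nb_tx]);
                    } while (++nb_tx < nb_rx);
            }
            return nb_tx;
    }

In the real forwarding engines the unsent count feeds fs->fwd_dropped and the burst size goes into the TX burst statistics via inc_tx_burst_stats().
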
/f-stack/dpdk/lib/librte_port/
rte_port_ring.c
214 uint32_t nb_tx; in send_burst() local
220 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst()
229 uint32_t nb_tx; in send_burst_mp() local
235 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst_mp()
490 uint32_t nb_tx = 0, i; in send_burst_nodrop() local
496 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
507 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
515 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst_nodrop()
524 uint32_t nb_tx = 0, i; in send_burst_mp_nodrop() local
530 if (nb_tx >= p->tx_buf_count) { in send_burst_mp_nodrop()
[all …]
rte_port_sched.c
172 __rte_unused uint32_t nb_tx; in rte_port_sched_writer_tx() local
174 nb_tx = rte_sched_port_enqueue(p->sched, p->tx_buf, p->tx_buf_count); in rte_port_sched_writer_tx()
194 __rte_unused uint32_t nb_tx; in rte_port_sched_writer_tx_bulk() local
198 nb_tx = rte_sched_port_enqueue(p->sched, p->tx_buf, in rte_port_sched_writer_tx_bulk()
200 RTE_PORT_SCHED_WRITER_STATS_PKTS_DROP_ADD(p, tx_buf_count - nb_tx); in rte_port_sched_writer_tx_bulk()
204 nb_tx = rte_sched_port_enqueue(p->sched, pkts, n_pkts); in rte_port_sched_writer_tx_bulk()
205 RTE_PORT_SCHED_WRITER_STATS_PKTS_DROP_ADD(p, n_pkts - nb_tx); in rte_port_sched_writer_tx_bulk()
219 __rte_unused uint32_t nb_tx; in rte_port_sched_writer_tx_bulk() local
221 nb_tx = rte_sched_port_enqueue(p->sched, p->tx_buf, in rte_port_sched_writer_tx_bulk()
223 RTE_PORT_SCHED_WRITER_STATS_PKTS_DROP_ADD(p, tx_buf_count - nb_tx); in rte_port_sched_writer_tx_bulk()
[all …]
rte_port_kni.c
166 uint32_t nb_tx; in send_burst() local
171 for (; nb_tx < p->tx_buf_count; nb_tx++) in send_burst()
172 rte_pktmbuf_free(p->tx_buf[nb_tx]); in send_burst()
350 uint32_t nb_tx = 0, i; in send_burst_nodrop() local
355 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
361 nb_tx += rte_kni_tx_burst(p->kni, in send_burst_nodrop()
362 p->tx_buf + nb_tx, in send_burst_nodrop()
363 p->tx_buf_count - nb_tx); in send_burst_nodrop()
366 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
374 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst_nodrop()
[all …]
rte_port_ethdev.c
170 uint32_t nb_tx; in send_burst() local
172 nb_tx = rte_eth_tx_burst(p->port_id, p->queue_id, in send_burst()
176 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst()
177 rte_pktmbuf_free(p->tx_buf[nb_tx]); in send_burst()
358 uint32_t nb_tx = 0, i; in send_burst_nodrop() local
364 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
370 nb_tx += rte_eth_tx_burst(p->port_id, p->queue_id, in send_burst_nodrop()
371 p->tx_buf + nb_tx, p->tx_buf_count - nb_tx); in send_burst_nodrop()
374 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
382 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst_nodrop()
[all …]
rte_port_sym_crypto.c
197 uint32_t nb_tx; in send_burst() local
203 nb_tx); in send_burst()
204 for (; nb_tx < p->tx_buf_count; nb_tx++) in send_burst()
205 rte_pktmbuf_free(p->tx_buf[nb_tx]->sym->m_src); in send_burst()
389 uint32_t nb_tx = 0, i; in send_burst_nodrop() local
395 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
402 p->queue_id, p->tx_buf + nb_tx, in send_burst_nodrop()
403 p->tx_buf_count - nb_tx); in send_burst_nodrop()
406 if (nb_tx >= p->tx_buf_count) { in send_burst_nodrop()
414 p->tx_buf_count - nb_tx); in send_burst_nodrop()
[all …]
rte_port_ras.c
138 uint32_t nb_tx; in send_burst() local
140 nb_tx = rte_ring_sp_enqueue_burst(p->ring, (void **)p->tx_buf, in send_burst()
143 RTE_PORT_RING_WRITER_RAS_STATS_PKTS_DROP_ADD(p, p->tx_buf_count - nb_tx); in send_burst()
144 for ( ; nb_tx < p->tx_buf_count; nb_tx++) in send_burst()
145 rte_pktmbuf_free(p->tx_buf[nb_tx]); in send_burst()
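
The librte_port writers above buffer packets in p->tx_buf and flush them in send_burst(); packets that the underlying ethdev, ring, KNI or cryptodev did not accept are freed by the plain writers and re-submitted by the *_nodrop variants. A hedged sketch of the drop-style flush for the ethdev case follows; struct writer_sketch is an illustrative stand-in for the real port structures, which also keep drop statistics.

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    struct writer_sketch {                  /* illustrative, not the real type */
            struct rte_mbuf *tx_buf[64];
            uint32_t tx_buf_count;
            uint16_t port_id;
            uint16_t queue_id;
    };

    static void
    send_burst_sketch(struct writer_sketch *p)
    {
            uint32_t nb_tx;

            nb_tx = rte_eth_tx_burst(p->port_id, p->queue_id,
                            p->tx_buf, p->tx_buf_count);
            /* free whatever the device did not take */
            for ( ; nb_tx < p->tx_buf_count; nb_tx++)
                    rte_pktmbuf_free(p->tx_buf[nb_tx]);
            p->tx_buf_count = 0;
    }

The send_burst_nodrop() hits instead keep re-submitting p->tx_buf + nb_tx until nb_tx reaches p->tx_buf_count, falling back to freeing the remainder only after the configured number of extra attempts.
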
/f-stack/dpdk/app/test/
test_pmd_perf.c
360 drop += (nb_rx - nb_tx); in measure_rxtx()
400 drop += (nb_rx - nb_tx); in measure_rxonly()
438 drop += (nb_rx - nb_tx); in measure_txonly()
482 &tx_burst[idx], nb_tx); in main_loop()
483 num -= nb_tx; in main_loop()
484 idx += nb_tx; in main_loop()
507 nb_tx = 0; in main_loop()
508 while (nb_tx < nb_rx) in main_loop()
636 &tx_burst[idx], nb_tx); in exec_burst()
637 idx += nb_tx; in exec_burst()
[all …]
/f-stack/dpdk/examples/l2fwd-event/
l2fwd_event.c
237 uint16_t nb_rx, nb_tx; in l2fwd_event_loop_burst() local
259 nb_tx = rte_event_enqueue_burst(event_d_id, port_id, in l2fwd_event_loop_burst()
261 while (nb_tx < nb_rx && !rsrc->force_quit) in l2fwd_event_loop_burst()
262 nb_tx += rte_event_enqueue_burst(event_d_id, in l2fwd_event_loop_burst()
263 port_id, ev + nb_tx, in l2fwd_event_loop_burst()
264 nb_rx - nb_tx); in l2fwd_event_loop_burst()
268 nb_tx = rte_event_eth_tx_adapter_enqueue(event_d_id, in l2fwd_event_loop_burst()
271 while (nb_tx < nb_rx && !rsrc->force_quit) in l2fwd_event_loop_burst()
272 nb_tx += rte_event_eth_tx_adapter_enqueue( in l2fwd_event_loop_burst()
274 ev + nb_tx, nb_rx - nb_tx, 0); in l2fwd_event_loop_burst()
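
The l2fwd-event hits show the same retry idea one layer up: events that rte_event_enqueue_burst() or rte_event_eth_tx_adapter_enqueue() did not accept are re-enqueued from offset nb_tx until everything is in or the application is told to quit. A hedged sketch for the plain enqueue case; dev_id, port_id and force_quit are illustrative parameters.

    #include <stdbool.h>
    #include <rte_eventdev.h>

    static uint16_t
    enqueue_all_or_quit(uint8_t dev_id, uint8_t port_id, struct rte_event *ev,
                        uint16_t nb_rx, volatile bool *force_quit)
    {
            uint16_t nb_tx = rte_event_enqueue_burst(dev_id, port_id, ev, nb_rx);

            /* keep pushing the unsent tail, mirroring l2fwd_event_loop_burst() */
            while (nb_tx < nb_rx && !*force_quit)
                    nb_tx += rte_event_enqueue_burst(dev_id, port_id,
                                    ev + nb_tx, nb_rx - nb_tx);
            return nb_tx;
    }
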
/f-stack/dpdk/drivers/net/failsafe/
failsafe_rxtx.c
147 uint16_t nb_tx; in failsafe_tx_burst() local
155 nb_tx = ETH(sdev)->tx_pkt_burst(sub_txq, tx_pkts, nb_pkts); in failsafe_tx_burst()
157 return nb_tx; in failsafe_tx_burst()
168 uint16_t nb_tx; in failsafe_tx_burst_fast() local
175 nb_tx = ETH(sdev)->tx_pkt_burst(sub_txq, tx_pkts, nb_pkts); in failsafe_tx_burst_fast()
177 return nb_tx; in failsafe_tx_burst_fast()
/f-stack/dpdk/drivers/net/nfb/
nfb_stats.c
16 uint16_t nb_tx = dev->data->nb_tx_queues; in nfb_eth_stats_get() local
37 for (i = 0; i < nb_tx; i++) { in nfb_eth_stats_get()
60 uint16_t nb_tx = dev->data->nb_tx_queues; in nfb_eth_stats_reset() local
72 for (i = 0; i < nb_tx; i++) { in nfb_eth_stats_reset()
nfb_ethdev.c
121 uint16_t nb_tx = dev->data->nb_tx_queues; in nfb_eth_dev_start() local
129 for (i = 0; i < nb_tx; i++) { in nfb_eth_dev_start()
138 for (i = 0; i < nb_tx; i++) in nfb_eth_dev_start()
159 uint16_t nb_tx = dev->data->nb_tx_queues; in nfb_eth_dev_stop() local
163 for (i = 0; i < nb_tx; i++) in nfb_eth_dev_stop()
222 uint16_t nb_tx = dev->data->nb_tx_queues; in nfb_eth_dev_close() local
238 for (i = 0; i < nb_tx; i++) { in nfb_eth_dev_close()
/f-stack/dpdk/examples/ntb/
ntb_fwd.c
219 uint16_t nb_tx, buf_size; in cmd_sendfile_parsed() local
302 nb_tx = ret; in cmd_sendfile_parsed()
306 &pkts_send[nb_tx], nb_pkt - nb_tx, in cmd_sendfile_parsed()
314 nb_tx += ret; in cmd_sendfile_parsed()
426 uint16_t nb_rx, nb_tx; in start_iofwd_per_lcore() local
454 nb_tx = ret; in start_iofwd_per_lcore()
476 if (unlikely(nb_tx < nb_rx)) { in start_iofwd_per_lcore()
479 } while (++nb_tx < nb_rx); in start_iofwd_per_lcore()
538 uint16_t nb_pkt, nb_tx; in start_txonly_per_lcore() local
584 nb_tx = ret; in start_txonly_per_lcore()
[all …]
/f-stack/dpdk/drivers/net/virtio/
virtio_rxtx.c
1697 for (nb_tx = 0; nb_tx < nb_pkts; nb_tx++) { in virtio_xmit_pkts_prepare()
1732 return nb_tx; in virtio_xmit_pkts_prepare()
1747 return nb_tx; in virtio_xmit_pkts_packed()
1758 for (nb_tx = 0; nb_tx < nb_pkts; nb_tx++) { in virtio_xmit_pkts_packed()
1814 return nb_tx; in virtio_xmit_pkts_packed()
1827 return nb_tx; in virtio_xmit_pkts()
1839 for (nb_tx = 0; nb_tx < nb_pkts; nb_tx++) { in virtio_xmit_pkts()
1898 return nb_tx; in virtio_xmit_pkts()
1929 return nb_tx; in virtio_xmit_pkts_inorder()
1941 for (nb_tx = 0; nb_tx < nb_pkts; nb_tx++) { in virtio_xmit_pkts_inorder()
[all …]
virtio_rxtx_packed_avx.c
258 uint16_t nb_tx = 0; in virtio_xmit_pkts_packed_vec() local
262 return nb_tx; in virtio_xmit_pkts_packed_vec()
277 &tx_pkts[nb_tx])) { in virtio_xmit_pkts_packed_vec()
278 nb_tx += PACKED_BATCH_SIZE; in virtio_xmit_pkts_packed_vec()
284 tx_pkts[nb_tx])) { in virtio_xmit_pkts_packed_vec()
285 nb_tx++; in virtio_xmit_pkts_packed_vec()
292 txvq->stats.packets += nb_tx; in virtio_xmit_pkts_packed_vec()
294 if (likely(nb_tx)) { in virtio_xmit_pkts_packed_vec()
301 return nb_tx; in virtio_xmit_pkts_packed_vec()
/f-stack/dpdk/drivers/net/hns3/
hns3_rxtx_vec_sve.c
426 uint16_t nb_tx = 0; in hns3_xmit_fixed_burst_vec_sve() local
438 nb_tx = txq->nb_tx_desc - txq->next_to_use; in hns3_xmit_fixed_burst_vec_sve()
439 hns3_tx_fill_hw_ring_sve(txq, tx_pkts, nb_tx); in hns3_xmit_fixed_burst_vec_sve()
443 hns3_tx_fill_hw_ring_sve(txq, tx_pkts + nb_tx, nb_pkts - nb_tx); in hns3_xmit_fixed_burst_vec_sve()
444 txq->next_to_use += nb_pkts - nb_tx; in hns3_xmit_fixed_burst_vec_sve()
459 uint16_t nb_tx = 0; in hns3_xmit_pkts_vec_sve() local
463 ret = hns3_xmit_fixed_burst_vec_sve(tx_queue, &tx_pkts[nb_tx], in hns3_xmit_pkts_vec_sve()
465 nb_tx += ret; in hns3_xmit_pkts_vec_sve()
471 return nb_tx; in hns3_xmit_pkts_vec_sve()
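
hns3_xmit_fixed_burst_vec_sve() above also has to handle the TX descriptor ring wrapping: when the burst would run past the end of the ring, the first nb_tx packets fill the ring up to its end and the remainder restarts at index 0. A hedged sketch of just that split; struct txq_sketch and fill_hw_ring() are illustrative stand-ins for the driver's queue type and SVE fill helper.

    #include <stdint.h>
    #include <rte_mbuf.h>

    struct txq_sketch {             /* illustrative, not the hns3 type */
            uint16_t nb_tx_desc;    /* ring size */
            uint16_t next_to_use;   /* next free descriptor */
    };

    /* stub standing in for the driver's descriptor-fill helper */
    static void
    fill_hw_ring(struct txq_sketch *txq, struct rte_mbuf **pkts, uint16_t n)
    {
            (void)txq; (void)pkts; (void)n;
    }

    static uint16_t
    xmit_fixed_burst_sketch(struct txq_sketch *txq,
                            struct rte_mbuf **tx_pkts, uint16_t nb_pkts)
    {
            uint16_t nb_tx = 0;

            if (txq->next_to_use + nb_pkts > txq->nb_tx_desc) {
                    /* first chunk: fill up to the end of the ring */
                    nb_tx = txq->nb_tx_desc - txq->next_to_use;
                    fill_hw_ring(txq, tx_pkts, nb_tx);
                    txq->next_to_use = 0;
            }
            /* remaining packets wrap around to the start of the ring */
            fill_hw_ring(txq, tx_pkts + nb_tx, nb_pkts - nb_tx);
            txq->next_to_use += nb_pkts - nb_tx;

            return nb_pkts;
    }
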
hns3_rxtx_vec.c
32 uint16_t nb_tx = 0; in hns3_xmit_pkts_vec() local
38 ret = hns3_xmit_fixed_burst_vec(tx_queue, &tx_pkts[nb_tx], in hns3_xmit_pkts_vec()
40 nb_tx += ret; in hns3_xmit_pkts_vec()
46 return nb_tx; in hns3_xmit_pkts_vec()
/f-stack/dpdk/examples/l2fwd-cat/
l2fwd-cat.c
137 const uint16_t nb_tx = rte_eth_tx_burst(port ^ 1, 0, in lcore_main() local
141 if (unlikely(nb_tx < nb_rx)) { in lcore_main()
143 for (buf = nb_tx; buf < nb_rx; buf++) in lcore_main()
