/f-stack/dpdk/app/test-pmd/
macswap.c, matches in pkt_burst_mac_swap():
    57:  uint16_t nb_rx;  [local]
    69:  inc_rx_burst_stats(fs, nb_rx);
    70:  if (unlikely(nb_rx == 0))
    73:  fs->rx_packets += nb_rx;
    76:  do_macswap(pkts_burst, nb_rx, txp);
    82:  if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) {
    84:  while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) {
    87:  &pkts_burst[nb_tx], nb_rx - nb_tx);
    92:  if (unlikely(nb_tx < nb_rx)) {
    93:  fs->fwd_dropped += (nb_rx - nb_tx);
    [all …]
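macswap.c and the other testpmd forwarding engines listed below (iofwd.c, noisy_vnf.c, macfwd.c, 5tswap.c, flowgen.c, csumonly.c) all use nb_rx the same way: it is the size of the received burst, it drives the per-packet work, it bounds the transmit retry loop, and it feeds the drop accounting. The following is a minimal, self-contained sketch of that idiom, not testpmd's actual code; the burst size, retry constants, counters and the empty per-engine work step are placeholder assumptions.

```c
/* Sketch of the testpmd rx -> process -> tx-with-retry idiom.
 * Constants and counters are placeholders, not testpmd definitions. */
#include <stdint.h>
#include <rte_branch_prediction.h>
#include <rte_cycles.h>
#include <rte_ethdev.h>
#include <rte_mbuf.h>

#define BURST_SIZE        32
#define TX_RETRY_MAX      64
#define TX_RETRY_DELAY_US 1

static uint64_t rx_packets, tx_packets, fwd_dropped;

static void
forward_one_burst(uint16_t rx_port, uint16_t rx_q,
		  uint16_t tx_port, uint16_t tx_q)
{
	struct rte_mbuf *pkts[BURST_SIZE];
	uint16_t nb_rx, nb_tx;
	uint32_t retry = 0;

	nb_rx = rte_eth_rx_burst(rx_port, rx_q, pkts, BURST_SIZE);
	if (unlikely(nb_rx == 0))
		return;
	rx_packets += nb_rx;

	/* engine-specific work on the nb_rx packets would go here
	 * (MAC swap, checksum recomputation, 5-tuple swap, ...) */

	nb_tx = rte_eth_tx_burst(tx_port, tx_q, pkts, nb_rx);

	/* Retry the unsent tail a bounded number of times, as the
	 * retry_enabled branches in the matches above do. */
	while (nb_tx < nb_rx && retry++ < TX_RETRY_MAX) {
		rte_delay_us(TX_RETRY_DELAY_US);
		nb_tx += rte_eth_tx_burst(tx_port, tx_q,
					  &pkts[nb_tx], nb_rx - nb_tx);
	}
	tx_packets += nb_tx;

	/* Anything still unsent counts as dropped and must be freed. */
	if (unlikely(nb_tx < nb_rx)) {
		fwd_dropped += nb_rx - nb_tx;
		do {
			rte_pktmbuf_free(pkts[nb_tx]);
		} while (++nb_tx < nb_rx);
	}
}
```

The invariant the pattern maintains is that each of the nb_rx mbufs is either transmitted or freed, so tx_packets plus fwd_dropped always accounts for the whole burst.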
|
iofwd.c, matches in pkt_burst_io_forward():
    50:  uint16_t nb_rx;  [local]
    60:  nb_rx = rte_eth_rx_burst(fs->rx_port, fs->rx_queue,
    62:  inc_rx_burst_stats(fs, nb_rx);
    63:  if (unlikely(nb_rx == 0))
    65:  fs->rx_packets += nb_rx;
    68:  pkts_burst, nb_rx);
    72:  if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) {
    77:  &pkts_burst[nb_tx], nb_rx - nb_tx);
    82:  if (unlikely(nb_tx < nb_rx)) {
    83:  fs->fwd_dropped += (nb_rx - nb_tx);
    [all …]
|
noisy_vnf.c:
  in drop_pkts():
    113:  if (nb_tx < nb_rx) {
    116:  } while (++nb_tx < nb_rx);
    119:  return nb_rx - nb_tx;
  in pkt_burst_noisy_vnf():
    147:  uint16_t nb_rx = 0;  [local]
    157:  inc_rx_burst_stats(fs, nb_rx);
    158:  if (unlikely(nb_rx == 0))
    160:  fs->rx_packets += nb_rx;
    165:  pkts_burst, nb_rx);
    175:  if (fifo_free >= nb_rx) {
    178:  if (nb_enqd < nb_rx)
    [all …]
|
macfwd.c, matches in pkt_burst_mac_forward():
    54:   uint16_t nb_rx;  [local]
    68:   inc_rx_burst_stats(fs, nb_rx);
    69:   if (unlikely(nb_rx == 0))
    72:   fs->rx_packets += nb_rx;
    81:   for (i = 0; i < nb_rx; i++) {
    82:   if (likely(i < nb_rx - 1))
    102:  if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) {
    107:  &pkts_burst[nb_tx], nb_rx - nb_tx);
    113:  if (unlikely(nb_tx < nb_rx)) {
    114:  fs->fwd_dropped += (nb_rx - nb_tx);
    [all …]
|
5tswap.c, matches in pkt_burst_5tuple_swap():
    93:   uint16_t nb_rx;  [local]
    117:  inc_rx_burst_stats(fs, nb_rx);
    118:  if (unlikely(nb_rx == 0))
    121:  fs->rx_packets += nb_rx;
    124:  vlan_qinq_set(pkts_burst, nb_rx, ol_flags,
    126:  for (i = 0; i < nb_rx; i++) {
    127:  if (likely(i < nb_rx - 1))
    174:  &pkts_burst[nb_tx], nb_rx - nb_tx);
    179:  if (unlikely(nb_tx < nb_rx)) {
    180:  fs->fwd_dropped += (nb_rx - nb_tx);
    [all …]
|
rxonly.c, matches in pkt_burst_receive():
    50:  uint16_t nb_rx;  [local]
    59:  nb_rx = rte_eth_rx_burst(fs->rx_port, fs->rx_queue, pkts_burst,
    61:  inc_rx_burst_stats(fs, nb_rx);
    62:  if (unlikely(nb_rx == 0))
    65:  fs->rx_packets += nb_rx;
    66:  for (i = 0; i < nb_rx; i++)
|
flowgen.c, matches in pkt_burst_flow_gen():
    94:   uint16_t nb_rx;  [local]
    106:  nb_rx = rte_eth_rx_burst(fs->rx_port, fs->rx_queue, pkts_burst,
    108:  fs->rx_packets += nb_rx;
    110:  for (i = 0; i < nb_rx; i++)
    181:  if (unlikely(nb_tx < nb_rx) && fs->retry_enabled) {
    183:  while (nb_tx < nb_rx && retry++ < burst_tx_retry_num) {
    186:  &pkts_burst[nb_tx], nb_rx - nb_tx);
|
csumonly.c, matches in pkt_burst_checksum_forward():
    807:   uint16_t nb_rx;  [local]
    829:   if (unlikely(nb_rx == 0))
    832:   fs->rx_packets += nb_rx;
    847:   if (likely(i < nb_rx - 1))
    1054:  nb_rx = rte_gro_reassemble_burst(pkts_burst, nb_rx,
    1058:  nb_rx = rte_gro_reassemble(pkts_burst, nb_rx, gro_ctx);
    1067:  &pkts_burst[nb_rx],
    1100:  nb_rx = nb_segments;
    1104:  tx_pkts_burst, nb_rx);
    1105:  if (nb_prep != nb_rx)
    [all …]
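csumonly.c is the one engine above that rewrites nb_rx after receive: lightweight GRO (rte_gro_reassemble_burst()) or heavyweight GRO (rte_gro_reassemble() with a context) shrinks the burst, and GSO later replaces it with nb_segments, so nb_rx tracks the current working burst size rather than the raw receive count. Below is a hedged sketch of the lightweight-mode call only; the rte_gro_param sizing values are illustrative assumptions.

```c
/* Sketch of lightweight-mode GRO folding a received burst.
 * The parameter values are placeholders, not csumonly's configuration. */
#include <rte_gro.h>
#include <rte_mbuf.h>

static uint16_t
gro_fold_burst(struct rte_mbuf **pkts_burst, uint16_t nb_rx)
{
	struct rte_gro_param gro_param = {
		.gro_types = RTE_GRO_TCP_IPV4,  /* which flows to merge */
		.max_flow_num = 8,              /* placeholder sizing */
		.max_item_per_flow = 32,
	};

	/* Returns the (smaller or equal) number of packets left in the
	 * array after merging; the caller stores it back into nb_rx. */
	return rte_gro_reassemble_burst(pkts_burst, nb_rx, &gro_param);
}
```

A caller stores the return value back into nb_rx before the transmit stage, which is what the line 1054 match above shows.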
|
/f-stack/dpdk/drivers/net/failsafe/
failsafe_rxtx.c:
  in failsafe_rx_burst():
    87:   uint16_t nb_rx;  [local]
    93:   nb_rx = 0;
    99:   nb_rx = ETH(sdev)->
    105:  if (nb_rx)
    106:  failsafe_rx_set_port(rx_pkts, nb_rx,
    108:  return nb_rx;
  in failsafe_rx_burst_fast():
    119:  uint16_t nb_rx;  [local]
    127:  nb_rx = ETH(sdev)->
    133:  if (nb_rx)
    134:  failsafe_rx_set_port(rx_pkts, nb_rx,
    [all …]
|
/f-stack/dpdk/app/test/
test_pmd_perf.c:
  in measure_rxtx():
    357:  count += nb_rx;
    360:  drop += (nb_rx - nb_tx);
  in measure_rxonly():
    397:  count += nb_rx;
    400:  drop += (nb_rx - nb_tx);
  in measure_txonly():
    433:  count += nb_rx;
    438:  drop += (nb_rx - nb_tx);
  in main_loop():
    508:  while (nb_tx < nb_rx)
    510:  nb_free += nb_rx;
  in poll_burst():
    583:  next[portid] += nb_rx;
    584:  num[portid] -= nb_rx;
    [all …]
|
/f-stack/dpdk/examples/l3fwd/
l3fwd_em_sequential.h:
  in l3fwd_em_send_packets():
    76:   l3fwd_em_send_packets(int nb_rx, struct rte_mbuf **pkts_burst,  [argument]
    82:   if (nb_rx > 0) {
    87:   for (i = 1, j = 0; j < nb_rx; i++, j++) {
    88:   if (i < nb_rx) {
    96:   send_packets_multi(qconf, pkts_burst, dst_port, nb_rx);
  in l3fwd_em_process_events():
    104:  l3fwd_em_process_events(int nb_rx, struct rte_event **events,  [argument]
    112:  for (i = 1, j = 0; j < nb_rx; i++, j++) {
    115:  if (i < nb_rx) {
|
l3fwd_em.h:
  in l3fwd_em_no_opt_send_packets():
    125:  l3fwd_em_no_opt_send_packets(int nb_rx, struct rte_mbuf **pkts_burst,  [argument]
    131:  for (j = 0; j < PREFETCH_OFFSET && j < nb_rx; j++)
    138:  for (j = 0; j < (nb_rx - PREFETCH_OFFSET); j++) {
    145:  for (; j < nb_rx; j++)
  in l3fwd_em_no_opt_process_events():
    154:  l3fwd_em_no_opt_process_events(int nb_rx, struct rte_event **events,  [argument]
    160:  for (j = 0; j < PREFETCH_OFFSET && j < nb_rx; j++)
    167:  for (j = 0; j < (nb_rx - PREFETCH_OFFSET); j++) {
    174:  for (; j < nb_rx; j++)
|
l3fwd_em_hlm.h:
  in l3fwd_em_send_packets():
    185:  l3fwd_em_send_packets(int nb_rx, struct rte_mbuf **pkts_burst,  [argument]
    195:  int32_t n = RTE_ALIGN_FLOOR(nb_rx, EM_HASH_LOOKUP_COUNT);
    197:  for (j = 0; j < EM_HASH_LOOKUP_COUNT && j < nb_rx; j++) {
    215:  i < EM_HASH_LOOKUP_COUNT && pos < nb_rx; i++, pos++) {
    238:  for (; j < nb_rx; j++)
    241:  send_packets_multi(qconf, pkts_burst, dst_port, nb_rx);
  in l3fwd_em_process_events():
    250:  l3fwd_em_process_events(int nb_rx, struct rte_event **ev,  [argument]
    261:  int32_t n = RTE_ALIGN_FLOOR(nb_rx, EM_HASH_LOOKUP_COUNT);
    263:  for (j = 0; j < EM_HASH_LOOKUP_COUNT && j < nb_rx; j++) {
    282:  i < EM_HASH_LOOKUP_COUNT && pos < nb_rx; i++, pos++) {
    [all …]
|
l3fwd_lpm.h, matches in l3fwd_lpm_no_opt_send_packets():
    79:  l3fwd_lpm_no_opt_send_packets(int nb_rx, struct rte_mbuf **pkts_burst,  [argument]
    85:  for (j = 0; j < PREFETCH_OFFSET && j < nb_rx; j++)
    89:  for (j = 0; j < (nb_rx - PREFETCH_OFFSET); j++) {
    96:  for (; j < nb_rx; j++)
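The no_opt send paths in l3fwd_em.h and l3fwd_lpm.h above share a three-loop structure keyed on nb_rx: prefetch the headers of the first PREFETCH_OFFSET packets, process the bulk of the burst while prefetching PREFETCH_OFFSET packets ahead, then drain the already-prefetched tail. The sketch below shows just that structure; the per-packet lookup-and-forward step is reduced to a stub.

```c
/* Sketch of the three-loop PREFETCH_OFFSET structure used by the
 * l3fwd no_opt send paths listed above. */
#include <rte_mbuf.h>
#include <rte_prefetch.h>

#define PREFETCH_OFFSET 3

static void
process_packet(struct rte_mbuf *m)
{
	(void)m; /* the real code does an EM/LPM lookup and forwards here */
}

static void
send_packets_no_opt(int nb_rx, struct rte_mbuf **pkts_burst)
{
	int j;

	/* Prefetch the headers of the first PREFETCH_OFFSET packets. */
	for (j = 0; j < PREFETCH_OFFSET && j < nb_rx; j++)
		rte_prefetch0(rte_pktmbuf_mtod(pkts_burst[j], void *));

	/* Process already-prefetched packets while prefetching further ahead. */
	for (j = 0; j < (nb_rx - PREFETCH_OFFSET); j++) {
		rte_prefetch0(rte_pktmbuf_mtod(
				pkts_burst[j + PREFETCH_OFFSET], void *));
		process_packet(pkts_burst[j]);
	}

	/* Drain the remaining, already-prefetched tail of the burst. */
	for (; j < nb_rx; j++)
		process_packet(pkts_burst[j]);
}
```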
|
/f-stack/dpdk/app/test-eventdev/
test_pipeline_atq.c:
  in pipeline_atq_worker_single_stage_burst_tx():
    70:   if (!nb_rx) {
    75:   for (i = 0; i < nb_rx; i++) {
    80:   pipeline_event_tx_burst(dev, port, ev, nb_rx);
    81:   w->processed_pkts += nb_rx;
  in pipeline_atq_worker_single_stage_burst_fwd():
    97:   if (!nb_rx) {
    102:  for (i = 0; i < nb_rx; i++) {
    110:  w->processed_pkts += nb_rx;
  in pipeline_atq_worker_multi_stage_burst_tx():
    185:  if (!nb_rx) {
    190:  for (i = 0; i < nb_rx; i++) {
  in pipeline_atq_worker_multi_stage_burst_fwd():
    221:  if (!nb_rx) {
    [all …]
|
test_pipeline_queue.c:
  in pipeline_queue_worker_single_stage_burst_tx():
    77:   if (!nb_rx) {
    82:   for (i = 0; i < nb_rx; i++) {
    95:   pipeline_event_enqueue_burst(dev, port, ev, nb_rx);
  in pipeline_queue_worker_single_stage_burst_fwd():
    111:  if (!nb_rx) {
    116:  for (i = 0; i < nb_rx; i++) {
    123:  pipeline_event_enqueue_burst(dev, port, ev, nb_rx);
    124:  w->processed_pkts += nb_rx;
  in pipeline_queue_worker_multi_stage_burst_tx():
    205:  if (!nb_rx) {
    210:  for (i = 0; i < nb_rx; i++) {
  in pipeline_queue_worker_multi_stage_burst_fwd():
    243:  if (!nb_rx) {
    [all …]
|
test_pipeline_common.h:
  in pipeline_event_tx_burst():
    115:  struct rte_event *ev, const uint16_t nb_rx)  [argument]
    119:  enq = rte_event_eth_tx_adapter_enqueue(dev, port, ev, nb_rx, 0);
    120:  while (enq < nb_rx) {
    122:  ev + enq, nb_rx - enq, 0);
  in pipeline_event_enqueue_burst():
    136:  struct rte_event *ev, const uint16_t nb_rx)  [argument]
    140:  enq = rte_event_enqueue_burst(dev, port, ev, nb_rx);
    141:  while (enq < nb_rx) {
    143:  ev + enq, nb_rx - enq);
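Both helpers in test_pipeline_common.h treat nb_rx as the number of events that must eventually be accepted: one burst-enqueue call, then a loop over the unsent tail (ev + enq, nb_rx - enq) until enq reaches nb_rx. A sketch of the plain rte_event_enqueue_burst() variant, mirroring pipeline_event_enqueue_burst():

```c
/* Sketch of an enqueue-until-done helper: keep enqueueing the unsent
 * tail of the dequeued burst until all nb_rx events are accepted. */
#include <rte_eventdev.h>

static inline void
event_enqueue_burst_all(uint8_t dev, uint8_t port,
			struct rte_event *ev, const uint16_t nb_rx)
{
	uint16_t enq;

	enq = rte_event_enqueue_burst(dev, port, ev, nb_rx);
	while (enq < nb_rx)
		enq += rte_event_enqueue_burst(dev, port,
					       ev + enq, nb_rx - enq);
}
```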
|
test_order_atq.c, matches in order_atq_worker_burst():
    63:  uint16_t const nb_rx = rte_event_dequeue_burst(dev_id, port, ev,  [local]
    66:  if (nb_rx == 0) {
    73:  for (i = 0; i < nb_rx; i++) {
    90:  enq = rte_event_enqueue_burst(dev_id, port, ev, nb_rx);
    91:  while (enq < nb_rx) {
    93:  ev + enq, nb_rx - enq);
|
test_order_queue.c, matches in order_queue_worker_burst():
    63:  uint16_t const nb_rx = rte_event_dequeue_burst(dev_id, port, ev,  [local]
    66:  if (nb_rx == 0) {
    73:  for (i = 0; i < nb_rx; i++) {
    91:  enq = rte_event_enqueue_burst(dev_id, port, ev, nb_rx);
    92:  while (enq < nb_rx) {
    94:  ev + enq, nb_rx - enq);
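test_order_atq.c and test_order_queue.c wrap that enqueue pattern in a burst worker: dequeue up to a burst of events into nb_rx, back off with rte_pause() when the burst is empty, touch each event, then re-enqueue the whole burst. A sketch of that worker shape, with the tests' per-event sequence checking reduced to setting the forward operation:

```c
/* Sketch of a burst event worker in the shape of the order tests above.
 * The per-event work and the termination flag are placeholders. */
#include <rte_eventdev.h>
#include <rte_pause.h>

#define BURST_SIZE 16

static int
order_worker_burst(uint8_t dev_id, uint8_t port, volatile int *done)
{
	struct rte_event ev[BURST_SIZE];
	uint16_t i, enq;

	while (!*done) {
		uint16_t const nb_rx = rte_event_dequeue_burst(dev_id, port,
							       ev, BURST_SIZE,
							       0);
		if (nb_rx == 0) {
			rte_pause();
			continue;
		}

		for (i = 0; i < nb_rx; i++) {
			/* per-event work (sequence checking in the tests) */
			ev[i].op = RTE_EVENT_OP_FORWARD;
		}

		/* re-enqueue the whole burst, retrying the unsent tail */
		enq = rte_event_enqueue_burst(dev_id, port, ev, nb_rx);
		while (enq < nb_rx)
			enq += rte_event_enqueue_burst(dev_id, port,
						       ev + enq, nb_rx - enq);
	}
	return 0;
}
```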
|
/f-stack/dpdk/examples/eventdev_pipeline/
pipeline_worker_tx.c:
  in worker_event_enqueue_burst():
    32:   while (enq < nb_rx) {
  in worker_do_tx_single_burst():
    135:  if (!nb_rx) {
    139:  received += nb_rx;
    157:  fwd += nb_rx;
  in worker_do_tx_single_burst_atq():
    181:  if (!nb_rx) {
    186:  received += nb_rx;
    201:  fwd += nb_rx;
  in worker_do_tx_burst():
    323:  if (nb_rx == 0) {
    352:  fwd += nb_rx;
  in worker_do_tx_burst_atq():
    379:  if (nb_rx == 0) {
    [all …]
|
/f-stack/dpdk/examples/qos_sched/
app_thread.c:
  in app_rx_thread():
    64:   uint32_t i, nb_rx;  [local]
    76:   nb_rx = rte_eth_rx_burst(conf->rx_port, conf->rx_queue, rx_mbufs,
    79:   if (likely(nb_rx != 0)) {
    80:   APP_STATS_ADD(conf->stat.nb_rx, nb_rx);
    82:   for(i = 0; i < nb_rx; i++) {
    93:   (void **)rx_mbufs, nb_rx, NULL) == 0)) {
    94:   for(i = 0; i < nb_rx; i++) {
  in app_worker_thread():
    210:  APP_STATS_ADD(conf->stat.nb_rx, nb_pkt);
  in app_mixed_thread():
    246:  APP_STATS_ADD(conf->stat.nb_rx, nb_pkt);
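In qos_sched, app_rx_thread() uses nb_rx to size an all-or-nothing handoff: the received burst is pushed to a worker over an rte_ring with a single-producer bulk enqueue, and if that enqueue fails (returns 0) the entire burst is freed. A sketch of that handoff, with placeholder ring, port and burst-size values:

```c
/* Sketch of the rx-to-ring handoff used by app_rx_thread() above.
 * Ring, port/queue and burst size are placeholders. */
#include <rte_branch_prediction.h>
#include <rte_ethdev.h>
#include <rte_mbuf.h>
#include <rte_ring.h>

#define RX_BURST 64

static void
rx_to_ring_once(uint16_t port, uint16_t queue, struct rte_ring *rx_ring)
{
	struct rte_mbuf *rx_mbufs[RX_BURST];
	uint32_t i, nb_rx;

	nb_rx = rte_eth_rx_burst(port, queue, rx_mbufs, RX_BURST);
	if (likely(nb_rx != 0)) {
		/* Single-producer bulk enqueue: all nb_rx or nothing. */
		if (unlikely(rte_ring_sp_enqueue_bulk(rx_ring,
				(void **)rx_mbufs, nb_rx, NULL) == 0)) {
			/* Worker ring is full: drop the whole burst. */
			for (i = 0; i < nb_rx; i++)
				rte_pktmbuf_free(rx_mbufs[i]);
		}
	}
}
```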
|
/f-stack/dpdk/examples/l2fwd-event/
l2fwd_event.c, matches in l2fwd_event_loop_burst():
    237:  uint16_t nb_rx, nb_tx;  [local]
    248:  nb_rx = rte_event_dequeue_burst(event_d_id, port_id, ev,
    250:  if (nb_rx == 0)
    253:  for (i = 0; i < nb_rx; i++) {
    260:  ev, nb_rx);
    261:  while (nb_tx < nb_rx && !rsrc->force_quit)
    264:  nb_rx - nb_tx);
    270:  nb_rx, 0);
    271:  while (nb_tx < nb_rx && !rsrc->force_quit)
    274:  ev + nb_tx, nb_rx - nb_tx, 0);
|
/f-stack/dpdk/drivers/net/virtio/
virtio_rxtx.c:
  in virtio_recv_pkts():
    968:   nb_rx = 0;
    1056:  return nb_rx;
  in virtio_recv_pkts_packed():
    1075:  nb_rx = 0;
    1157:  return nb_rx;
  in virtio_recv_pkts_inorder():
    1181:  nb_rx = 0;
    1268:  nb_rx++;
    1298:  nb_rx++;
  in virtio_recv_mergeable_pkts():
    1447:  nb_rx++;
    1477:  nb_rx++;
  in virtio_recv_mergeable_pkts_packed():
    1619:  nb_rx++;
    [all …]
|
/f-stack/dpdk/examples/l2fwd-cat/
l2fwd-cat.c, matches in lcore_main():
    130:  const uint16_t nb_rx = rte_eth_rx_burst(port, 0,  [local]
    133:  if (unlikely(nb_rx == 0))
    138:  bufs, nb_rx);
    141:  if (unlikely(nb_tx < nb_rx)) {
    143:  for (buf = nb_tx; buf < nb_rx; buf++)
|
/f-stack/dpdk/examples/skeleton/
basicfwd.c, matches in lcore_main():
    145:  const uint16_t nb_rx = rte_eth_rx_burst(port, 0,  [local]
    148:  if (unlikely(nb_rx == 0))
    153:  bufs, nb_rx);
    156:  if (unlikely(nb_tx < nb_rx)) {
    158:  for (buf = nb_tx; buf < nb_rx; buf++)
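basicfwd.c and l2fwd-cat.c show the minimal, no-retry form of the forwarding loop: one receive burst, one transmit burst, and an immediate free of whatever the TX queue did not accept. A sketch of that loop body, with placeholder port numbers:

```c
/* Sketch of the minimal no-retry forwarding step used by basicfwd.c
 * and l2fwd-cat.c above.  Ports and burst size are placeholders. */
#include <rte_branch_prediction.h>
#include <rte_ethdev.h>
#include <rte_mbuf.h>

#define BURST_SIZE 32

static void
basic_forward_once(uint16_t rx_port, uint16_t tx_port)
{
	struct rte_mbuf *bufs[BURST_SIZE];
	uint16_t buf;

	const uint16_t nb_rx = rte_eth_rx_burst(rx_port, 0, bufs, BURST_SIZE);
	if (unlikely(nb_rx == 0))
		return;

	const uint16_t nb_tx = rte_eth_tx_burst(tx_port, 0, bufs, nb_rx);

	/* Free any packets the TX queue did not accept. */
	if (unlikely(nb_tx < nb_rx)) {
		for (buf = nb_tx; buf < nb_rx; buf++)
			rte_pktmbuf_free(bufs[buf]);
	}
}
```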
|