/dpdk/drivers/net/sfc/
sfc_ev.c
     72  sfc_err(evq->sa,  in sfc_ev_nop_rx()
     96  SFC_ASSERT(rxq->evq == evq);  in sfc_ev_efx_rx()
    243  SFC_ASSERT(txq->evq == evq);  in sfc_ev_tx()
    345  SFC_ASSERT(rxq->evq == evq);  in sfc_ev_rxq_flush_done()
    376  SFC_ASSERT(rxq->evq == evq);  in sfc_ev_rxq_flush_failed()
    407  SFC_ASSERT(txq->evq == evq);  in sfc_ev_txq_flush_done()
    578  efx_ev_qpoll(evq->common, &evq->read_ptr, evq->callbacks, evq);  in sfc_ev_qpoll()
    641  return efx_ev_qprime(evq->common, evq->read_ptr);  in sfc_ev_qprime()
    698  SFC_ASSERT(evq->dp_rxq == NULL || evq->dp_txq == NULL);  in sfc_ev_qstart()
    721  efx_ev_qcreate_check_init_done(evq->common, evq->callbacks, evq);  in sfc_ev_qstart()
    [all …]
sfc_tx.c
    145  struct sfc_evq *evq;  in sfc_tx_qinit() (local)
    191  txq->evq = evq;  in sfc_tx_qinit()
    249  sfc_ev_qfini(evq);  in sfc_tx_qinit()
    291  sfc_ev_qfini(txq->evq);  in sfc_tx_qfini()
    292  txq->evq = NULL;  in sfc_tx_qfini()
    521  struct sfc_evq *evq;  in sfc_tx_qstart() (local)
    536  evq = txq->evq;  in sfc_tx_qstart()
    599  sfc_ev_qstop(evq);  in sfc_tx_qstart()
    674  sfc_ev_qstop(txq->evq);  in sfc_tx_qstop()
    765  sfc_ev_qpoll(txq->evq);  in sfc_efx_tx_reap()
    [all …]
sfc_ev.h
    218  void sfc_ev_qfini(struct sfc_evq *evq);
    219  int sfc_ev_qstart(struct sfc_evq *evq, unsigned int hw_index);
    220  void sfc_ev_qstop(struct sfc_evq *evq);
    222  int sfc_ev_qprime(struct sfc_evq *evq);
    223  void sfc_ev_qpoll(struct sfc_evq *evq);
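The sfc_ev.h hits list the EVQ control API used by the Tx, Rx and interrupt paths in the other files here. Below is a minimal sketch of the call order those prototypes suggest; only the function names and signatures come from the header, while the struct layout and the stub bodies are invented for illustration and only model the sequencing (start, prime, poll, stop, fini).

```c
#include <stdio.h>

/* Invented stand-in for struct sfc_evq; the real layout is driver internal. */
struct sfc_evq {
	unsigned int hw_index;
	int started;
};

/* Stubs with the sfc_ev.h names and signatures; bodies are placeholders. */
static int sfc_ev_qstart(struct sfc_evq *evq, unsigned int hw_index)
{
	evq->hw_index = hw_index;
	evq->started = 1;
	return 0;
}

static int sfc_ev_qprime(struct sfc_evq *evq) { (void)evq; return 0; }
static void sfc_ev_qpoll(struct sfc_evq *evq) { (void)evq; }
static void sfc_ev_qstop(struct sfc_evq *evq) { evq->started = 0; }
static void sfc_ev_qfini(struct sfc_evq *evq) { (void)evq; }

int main(void)
{
	struct sfc_evq evq = { 0, 0 };

	/* Start the queue on a hardware index, then prime it so the next
	 * event can raise an interrupt. */
	if (sfc_ev_qstart(&evq, 0) != 0)
		return 1;
	if (sfc_ev_qprime(&evq) != 0)
		fprintf(stderr, "cannot prime EVQ %u\n", evq.hw_index);

	/* Poll processes pending events through the registered callbacks. */
	sfc_ev_qpoll(&evq);

	/* Teardown mirrors setup: stop, then finalise. */
	sfc_ev_qstop(&evq);
	sfc_ev_qfini(&evq);
	return 0;
}
```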
sfc_rx.c
     71  if (rxq->evq->read_ptr_primed != rxq->evq->read_ptr) {  in sfc_efx_rx_qprime()
     72  rc = efx_ev_qprime(rxq->evq->common, rxq->evq->read_ptr);  in sfc_efx_rx_qprime()
     74  rxq->evq->read_ptr_primed = rxq->evq->read_ptr;  in sfc_efx_rx_qprime()
    507  rxq->evq = sfc_rxq_by_dp_rxq(&rxq->dp)->evq;  in sfc_efx_rx_qcreate()
    796  struct sfc_evq *evq;  in sfc_rx_qstart() (local)
    809  evq = rxq->evq;  in sfc_rx_qstart()
    887  sfc_ev_qstop(evq);  in sfc_rx_qstart()
   1104  struct sfc_evq *evq;  in sfc_rx_qinit() (local)
   1198  rxq->evq = evq;  in sfc_rx_qinit()
   1276  sfc_ev_qfini(evq);  in sfc_rx_qinit()
    [all …]
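The first three sfc_rx.c hits (lines 71 to 74) outline the guard in sfc_efx_rx_qprime(): the EVQ is re-primed only when the read pointer has moved since the last prime, and the primed position is recorded on success. A small standalone sketch of that guard follows; the struct and the stub standing in for efx_ev_qprime() are invented, and only the field names and the call shape are taken from the hits.

```c
#include <stdio.h>

/* Invented EVQ state holding just the two fields the guard needs. */
struct evq_state {
	unsigned int read_ptr;         /* next event to be processed */
	unsigned int read_ptr_primed;  /* read_ptr at the last prime */
};

/* Stub standing in for efx_ev_qprime(common, read_ptr). */
static int efx_ev_qprime_stub(unsigned int read_ptr)
{
	printf("prime at read_ptr %u\n", read_ptr);
	return 0;
}

/* Prime only when events have been consumed since the last prime. */
static int evq_prime_if_needed(struct evq_state *evq)
{
	int rc = 0;

	if (evq->read_ptr_primed != evq->read_ptr) {
		rc = efx_ev_qprime_stub(evq->read_ptr);
		if (rc == 0)
			evq->read_ptr_primed = evq->read_ptr;
	}
	return rc;
}

int main(void)
{
	struct evq_state evq = { 0, 0 };

	(void)evq_prime_if_needed(&evq);  /* no-op: nothing consumed yet */
	evq.read_ptr = 8;                 /* pretend 8 events were processed */
	(void)evq_prime_if_needed(&evq);  /* primes and records the position */
	return 0;
}
```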
sfc_intr.c
     32  struct sfc_evq *evq;  in sfc_intr_handle_mgmt_evq() (local)
     36  evq = sa->mgmt_evq;  in sfc_intr_handle_mgmt_evq()
     40  evq->evq_index);  in sfc_intr_handle_mgmt_evq()
     42  sfc_ev_qpoll(evq);  in sfc_intr_handle_mgmt_evq()
     44  if (sfc_ev_qprime(evq) != 0)  in sfc_intr_handle_mgmt_evq()
     45  sfc_err(sa, "cannot prime EVQ %u", evq->evq_index);  in sfc_intr_handle_mgmt_evq()
sfc_tx.h
     55  struct sfc_evq *evq;  (member)
     68  struct sfc_evq *evq;  (member)
sfc_rx.h
     56  struct sfc_evq *evq;  (member)
     71  struct sfc_evq *evq;  (member)
sfc_ethdev.c
    521  sa = rxq->evq->sa;  in sfc_rx_queue_release()
    580  SFC_ASSERT(txq->evq != NULL);  in sfc_tx_queue_release()
    581  sa = txq->evq->sa;  in sfc_tx_queue_release()
/dpdk/examples/l3fwd/
l3fwd_event_generic.c
    134  evt_rsrc->evq.event_q_id,  in l3fwd_event_port_setup_generic()
    136  evt_rsrc->evq.nb_queues - 1);  in l3fwd_event_port_setup_generic()
    137  if (ret != (evt_rsrc->evq.nb_queues - 1))  in l3fwd_event_port_setup_generic()
    165  evt_rsrc->evq.nb_queues);  in l3fwd_event_queue_setup_generic()
    166  if (!evt_rsrc->evq.event_q_id)  in l3fwd_event_queue_setup_generic()
    190  evt_rsrc->evq.event_q_id[event_q_id] = event_q_id;  in l3fwd_event_queue_setup_generic()
    215  free(evt_rsrc->evq.event_q_id);  in l3fwd_rx_tx_adapter_setup_generic()
    234  if (i < evt_rsrc->evq.nb_queues)  in l3fwd_rx_tx_adapter_setup_generic()
    259  free(evt_rsrc->evq.event_q_id);  in l3fwd_rx_tx_adapter_setup_generic()
    290  &evt_rsrc->evq.event_q_id[  in l3fwd_rx_tx_adapter_setup_generic()
    [all …]
l3fwd_event_internal_port.c
     71  evt_rsrc->evq.nb_queues = event_d_conf.nb_event_queues;  in l3fwd_event_device_setup_internal_port()
    175  evt_rsrc->evq.event_q_id = (uint8_t *)malloc(sizeof(uint8_t) *  in l3fwd_event_queue_setup_internal_port()
    176  evt_rsrc->evq.nb_queues);  in l3fwd_event_queue_setup_internal_port()
    177  if (!evt_rsrc->evq.event_q_id)  in l3fwd_event_queue_setup_internal_port()
    180  for (event_q_id = 0; event_q_id < evt_rsrc->evq.nb_queues;  in l3fwd_event_queue_setup_internal_port()
    186  evt_rsrc->evq.event_q_id[event_q_id] = event_q_id;  in l3fwd_event_queue_setup_internal_port()
    216  free(evt_rsrc->evq.event_q_id);  in l3fwd_rx_tx_adapter_setup_internal_port()
    256  eth_q_conf.ev.queue_id = evt_rsrc->evq.event_q_id[q_id];  in l3fwd_rx_tx_adapter_setup_internal_port()
    268  if (q_id < evt_rsrc->evq.nb_queues)  in l3fwd_rx_tx_adapter_setup_internal_port()
    278  free(evt_rsrc->evq.event_q_id);  in l3fwd_rx_tx_adapter_setup_internal_port()
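The generic and internal-port hits share the same event queue bookkeeping: nb_queues is copied from the event device configuration, a uint8_t array of event queue ids is malloc'd, filled with an identity mapping, and freed on the adapter-setup error paths. A self-contained sketch of that pattern follows; the struct and function names below are invented, and only the field names and the allocate/fill/free shape come from the hits.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Invented stand-in for struct l3fwd_event_queues; only the two field
 * names used in the hits are taken from the source. */
struct evq_resources {
	uint8_t *event_q_id;   /* one event queue id per configured queue */
	uint8_t nb_queues;     /* number of event queues */
};

/* Allocate the id array and fill it with an identity mapping. */
static int
evq_setup(struct evq_resources *evq, uint8_t nb_queues)
{
	uint8_t q;

	evq->nb_queues = nb_queues;
	evq->event_q_id = malloc(sizeof(uint8_t) * nb_queues);
	if (evq->event_q_id == NULL)
		return -1;

	/* Identity mapping, as in the setup loops above: entry i holds id i. */
	for (q = 0; q < nb_queues; q++)
		evq->event_q_id[q] = q;

	return 0;
}

int main(void)
{
	struct evq_resources evq;

	if (evq_setup(&evq, 4) != 0) {
		fprintf(stderr, "event queue id array allocation failed\n");
		return 1;
	}

	printf("%u event queues configured\n", (unsigned int)evq.nb_queues);

	/* The error paths in the samples free the array, mirroring this. */
	free(evq.event_q_id);
	return 0;
}
```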
l3fwd_lpm.c
    261  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in lpm_event_loop_single()
    262  evt_rsrc->evq.nb_queues - 1];  in lpm_event_loop_single()
    313  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in lpm_event_loop_burst()
    314  evt_rsrc->evq.nb_queues - 1];  in lpm_event_loop_burst()
    455  evt_rsrc->evq.event_q_id[evt_rsrc->evq.nb_queues - 1];  in lpm_event_loop_vector()
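Each LPM event loop reads the Tx event queue id once, before entering the loop, as the last entry of event_q_id; the EM, FIB and l2fwd loops below use the same idiom. A trivial illustration of that convention, with made-up array contents:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Ids as they would look after the identity-mapping setup above. */
	uint8_t event_q_id[] = { 0, 1, 2, 3 };
	uint8_t nb_queues = sizeof(event_q_id) / sizeof(event_q_id[0]);

	/* The event loops read this once, up front. */
	const uint8_t tx_q_id = event_q_id[nb_queues - 1];

	printf("forwarded events are enqueued to event queue %u\n",
	       (unsigned int)tx_q_id);
	return 0;
}
```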
l3fwd_em.c
    653  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in em_event_loop_single()
    654  evt_rsrc->evq.nb_queues - 1];  in em_event_loop_single()
    713  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in em_event_loop_burst()
    714  evt_rsrc->evq.nb_queues - 1];  in em_event_loop_burst()
    840  evt_rsrc->evq.event_q_id[evt_rsrc->evq.nb_queues - 1];  in em_event_loop_vector()
l3fwd_fib.c
    250  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in fib_event_loop()
    251  evt_rsrc->evq.nb_queues - 1];  in fib_event_loop()
    493  evt_rsrc->evq.event_q_id[evt_rsrc->evq.nb_queues - 1];  in fib_event_loop_vector()
l3fwd_event.h
     69  struct l3fwd_event_queues evq;  (member)
/dpdk/examples/l2fwd-event/
l2fwd_event_generic.c
    148  evt_rsrc->evq.event_q_id,  in l2fwd_event_port_setup_generic()
    150  evt_rsrc->evq.nb_queues - 1);  in l2fwd_event_port_setup_generic()
    151  if (ret != (evt_rsrc->evq.nb_queues - 1))  in l2fwd_event_port_setup_generic()
    182  evt_rsrc->evq.nb_queues);  in l2fwd_event_queue_setup_generic()
    183  if (!evt_rsrc->evq.event_q_id)  in l2fwd_event_queue_setup_generic()
    208  evt_rsrc->evq.event_q_id[event_q_id] = event_q_id;  in l2fwd_event_queue_setup_generic()
    233  free(evt_rsrc->evq.event_q_id);  in l2fwd_rx_tx_adapter_setup_generic()
    273  if (i < evt_rsrc->evq.nb_queues)  in l2fwd_rx_tx_adapter_setup_generic()
    298  free(evt_rsrc->evq.event_q_id);  in l2fwd_rx_tx_adapter_setup_generic()
    330  &evt_rsrc->evq.event_q_id[  in l2fwd_rx_tx_adapter_setup_generic()
    [all …]
l2fwd_event_internal_port.c
     81  evt_rsrc->evq.nb_queues = event_d_conf.nb_event_queues;  in l2fwd_event_device_setup_internal_port()
    184  evt_rsrc->evq.event_q_id = (uint8_t *)malloc(sizeof(uint8_t) *  in l2fwd_event_queue_setup_internal_port()
    185  evt_rsrc->evq.nb_queues);  in l2fwd_event_queue_setup_internal_port()
    186  if (!evt_rsrc->evq.event_q_id)  in l2fwd_event_queue_setup_internal_port()
    189  for (event_q_id = 0; event_q_id < evt_rsrc->evq.nb_queues;  in l2fwd_event_queue_setup_internal_port()
    195  evt_rsrc->evq.event_q_id[event_q_id] = event_q_id;  in l2fwd_event_queue_setup_internal_port()
    225  free(evt_rsrc->evq.event_q_id);  in l2fwd_rx_tx_adapter_setup_internal_port()
    263  eth_q_conf.ev.queue_id = evt_rsrc->evq.event_q_id[q_id];  in l2fwd_rx_tx_adapter_setup_internal_port()
    275  if (q_id < evt_rsrc->evq.nb_queues)  in l2fwd_rx_tx_adapter_setup_internal_port()
    285  free(evt_rsrc->evq.event_q_id);  in l2fwd_rx_tx_adapter_setup_internal_port()
l2fwd_event.c
    192  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in l2fwd_event_loop_single()
    193  evt_rsrc->evq.nb_queues - 1];  in l2fwd_event_loop_single()
    237  const uint8_t tx_q_id = evt_rsrc->evq.event_q_id[  in l2fwd_event_loop_burst()
    238  evt_rsrc->evq.nb_queues - 1];  in l2fwd_event_loop_burst()
    422  evt_rsrc->evq.event_q_id[evt_rsrc->evq.nb_queues - 1];  in l2fwd_event_loop_vector()
l2fwd_event.h
     61  struct event_queues evq;  (member)
/dpdk/drivers/common/sfc_efx/base/
ef10_ev.c
    262  __in uint32_t evq,  in efx_mcdi_driver_event() (argument)
    276  MCDI_IN_SET_DWORD(req, DRIVER_EVENT_IN_EVQ, evq);  in efx_mcdi_driver_event()