Searched refs:q (Results 1 – 25 of 172) sorted by relevance

/dpdk/drivers/net/fm10k/
fm10k_rxtx.c
179 q->next_dd = (q->next_dd + q->nb_desc - count) % in fm10k_recv_pkts()
185 for (; q->next_alloc <= q->next_trigger; ++q->next_alloc) { in fm10k_recv_pkts()
186 mbuf = q->sw_ring[q->next_alloc]; in fm10k_recv_pkts()
194 q->hw_ring[q->next_alloc] = desc; in fm10k_recv_pkts()
337 q->next_dd = (q->next_dd + q->nb_desc - count) % in fm10k_recv_scattered_pkts()
343 for (; q->next_alloc <= q->next_trigger; ++q->next_alloc) { in fm10k_recv_scattered_pkts()
548 if (q->last_free == q->nb_desc) in tx_free_descriptors()
591 q->sw_ring[q->next_free] = mb; in tx_xmit_pkt()
594 q->hw_ring[q->next_free].buflen = in tx_xmit_pkt()
612 if (++q->next_free == q->nb_desc) in tx_xmit_pkt()
[all …]
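
Note: the fm10k hits above rely on the same wrap-around index arithmetic: next_dd is stepped back by the number of unfinished descriptors, with nb_desc added in before the modulo so the unsigned value never goes negative. A minimal sketch of that pattern, using illustrative names rather than the driver's own (count is assumed to be at most ring_size):

#include <stdint.h>

/* Step a ring index back by `count` slots, wrapping within a ring of
 * `ring_size` entries; adding ring_size first keeps the value non-negative. */
static inline uint16_t
ring_index_rewind(uint16_t idx, uint16_t count, uint16_t ring_size)
{
	return (uint16_t)((idx + ring_size - count) % ring_size);
}
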
fm10k_ethdev.c
204 diag = rte_mempool_get_bulk(q->mp, (void **)q->sw_ring, q->nb_desc); in rx_queue_reset()
216 q->hw_ring[i].q.pkt_addr = dma_addr; in rx_queue_reset()
217 q->hw_ring[i].q.hdr_addr = dma_addr; in rx_queue_reset()
225 q->sw_ring[q->nb_desc + i] = &q->fake_mbuf; in rx_queue_reset()
226 q->hw_ring[q->nb_desc + i] = zero; in rx_queue_reset()
256 q->hw_ring[q->nb_desc + i] = zero; in rx_queue_clean()
329 q->nb_free = q->nb_desc - 1; in tx_queue_reset()
330 fifo_reset(&q->rs_tracker, (q->nb_desc + 1) / q->rs_thresh); in tx_queue_reset()
878 q->ops->reset(q); in fm10k_dev_tx_queue_start()
1252 for (q = 0; q < FM10K_MAX_QUEUES_PF; q++) { in fm10k_xstats_get_names()
[all …]
/dpdk/drivers/net/cxgbe/
sge.c
238 if (++q->cidx == q->size) in unmap_rx_buf()
603 #define Q_IDXDIFF(q, idx) IDXDIFF((q)->pidx, (q)->idx, (q)->size) argument
604 #define R_IDXDIFF(q, idx) IDXDIFF((q)->cidx, (q)->idx, (q)->size) argument
644 q->db_pidx = q->pidx; in ring_tx_db()
673 q->dbidx = q->pidx; in ring_tx_db()
722 q->pidx -= q->size; in txq_advance()
1328 q->full = txq_avail(&q->q) < ndesc ? 1 : 0; in ctrl_xmit()
1334 wr = (struct fw_wr_hdr *)&q->q.desc[q->q.pidx]; in ctrl_xmit()
1345 ring_tx_db(q->adapter, &q->q); in ctrl_xmit()
1602 ret = q->handler(q, q->cur_desc, NULL); in process_responses()
[all …]
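
Note: Q_IDXDIFF and R_IDXDIFF measure how far the producer index (pidx) or consumer index (cidx) has advanced past a recorded index, wrapping at q->size. The underlying IDXDIFF macro is not among the hits; the helper below is an assumption about what such a difference conventionally computes, not the cxgbe definition:

/* Distance from `tail` forward to `head` on a circular queue of `size`
 * entries, assuming both indices already lie in [0, size). */
static inline unsigned int
idx_diff(unsigned int head, unsigned int tail, unsigned int size)
{
	return head >= tail ? head - tail : head + size - tail;
}
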
/dpdk/drivers/common/cnxk/
roc_nix_irq.c
266 for (q = 0; q < nix->nb_rx_queues; q++) { in nix_lf_q_irq()
278 for (q = 0; q < nix->nb_rx_queues; q++) { in nix_lf_q_irq()
293 for (q = 0; q < nix->nb_tx_queues; q++) { in nix_lf_q_irq()
348 for (q = 0; q < qs; q++) { in roc_nix_register_queue_irqs()
358 nix->qints_mem[q].qintx = q; in roc_nix_register_queue_irqs()
383 int vec, q; in roc_nix_unregister_queue_irqs() local
388 for (q = 0; q < nix->configured_qints; q++) { in roc_nix_unregister_queue_irqs()
425 for (q = 0; q < nix->configured_cints; q++) { in roc_nix_register_cq_irqs()
435 nix->cints_mem[q].qintx = q; in roc_nix_register_cq_irqs()
481 int vec, q; in roc_nix_unregister_cq_irqs() local
[all …]
roc_nix_inl_dev_irq.c
191 int rc, q; in nix_inl_nix_q_irq() local
201 for (q = 0; q < inl_dev->nb_rqs; q++) { in nix_inl_nix_q_irq()
227 for (q = 0; q < inl_dev->nb_rqs; q++) { in nix_inl_nix_q_irq()
245 int rc, q; in nix_inl_nix_ras_irq() local
259 for (q = 0; q < inl_dev->nb_rqs; q++) { in nix_inl_nix_ras_irq()
277 int rc, q; in nix_inl_nix_err_irq() local
292 for (q = 0; q < inl_dev->nb_rqs; q++) { in nix_inl_nix_err_irq()
346 for (q = 0; q < qints; q++) { in nix_inl_nix_register_irqs()
363 qints_mem[q].qint = q; in nix_inl_nix_register_irqs()
377 int q; in nix_inl_nix_unregister_irqs() local
[all …]
roc_npa_irq.c
143 uint32_t q, pool, aura; in npa_q_irq() local
153 for (q = 0; q < lf->nr_pools; q++) { in npa_q_irq()
158 pool = q % lf->qints; in npa_q_irq()
172 for (q = 0; q < lf->nr_pools; q++) { in npa_q_irq()
177 aura = q % lf->qints; in npa_q_irq()
207 for (q = 0; q < qs; q++) { in npa_register_queue_irqs()
218 qintmem += q; in npa_register_queue_irqs()
221 qintmem->qintx = q; in npa_register_queue_irqs()
244 int vec, q, qs; in npa_unregister_queue_irqs() local
249 for (q = 0; q < qs; q++) { in npa_unregister_queue_irqs()
[all …]
roc_npa_debug.c
95 uint32_t q; in roc_npa_ctx_dump() local
102 for (q = 0; q < lf->nr_pools; q++) { in roc_npa_ctx_dump()
104 if (plt_bitmap_get(lf->npa_bmp, q)) in roc_npa_ctx_dump()
110 aq->aura_id = q; in roc_npa_ctx_dump()
116 plt_err("Failed to get pool(%d) context", q); in roc_npa_ctx_dump()
119 npa_dump("============== pool=%d ===============\n", q); in roc_npa_ctx_dump()
123 for (q = 0; q < lf->nr_pools; q++) { in roc_npa_ctx_dump()
125 if (plt_bitmap_get(lf->npa_bmp, q)) in roc_npa_ctx_dump()
131 aq->aura_id = q; in roc_npa_ctx_dump()
137 plt_err("Failed to get aura(%d) context", q); in roc_npa_ctx_dump()
[all …]
/dpdk/drivers/common/cpt/
cpt_common.h
68 q->rid_queue[(q->tail + off) & (qsize - 1)] = rid; in pending_queue_push()
72 pending_queue_commit(struct pending_queue *q, unsigned int cnt, in pending_queue_commit() argument
78 q->tail = (q->tail + cnt) & (qsize - 1); in pending_queue_commit()
82 pending_queue_pop(struct pending_queue *q, const int qsize) in pending_queue_pop() argument
86 q->head = (q->head + 1) & (qsize - 1); in pending_queue_pop()
96 *rid = q->rid_queue[q->head]; in pending_queue_peek()
99 next_rid = q->rid_queue[(q->head + 1) & (qsize - 1)]; in pending_queue_peek()
105 pending_queue_level(struct pending_queue *q, const int qsize) in pending_queue_level() argument
107 return (q->tail - q->head) & (qsize - 1); in pending_queue_level()
111 pending_queue_free_slots(struct pending_queue *q, const int qsize, in pending_queue_free_slots() argument
[all …]
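
Note: the cpt_common.h hits describe a single-producer pending queue: entries are staged at offsets from tail, pending_queue_commit() publishes them by advancing tail, and the fill level is (tail - head) & (qsize - 1), which only works when qsize is a power of two. A condensed sketch of the same arithmetic with made-up names:

struct req_ring {
	unsigned int head;   /* next entry to pop */
	unsigned int tail;   /* next slot to publish */
	void *entry[];       /* qsize slots, qsize a power of two */
};

/* Stage an entry `off` slots past tail without making it visible yet. */
static inline void
req_ring_stage(struct req_ring *r, unsigned int off, void *e, unsigned int qsize)
{
	r->entry[(r->tail + off) & (qsize - 1)] = e;
}

/* Publish `cnt` previously staged entries. */
static inline void
req_ring_commit(struct req_ring *r, unsigned int cnt, unsigned int qsize)
{
	r->tail = (r->tail + cnt) & (qsize - 1);
}

/* Number of published entries not yet popped. */
static inline unsigned int
req_ring_level(const struct req_ring *r, unsigned int qsize)
{
	return (r->tail - r->head) & (qsize - 1);
}
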
/dpdk/drivers/net/ionic/
ionic_rxtx.c
61 struct ionic_queue *q = &txq->qcq.q; in ionic_txq_info_get() local
72 struct ionic_queue *q = &txq->qcq.q; in ionic_tx_flush() local
97 info = IONIC_INFO_PTR(q, q->tail_idx); in ionic_tx_flush()
99 q->tail_idx = Q_NEXT_TO_SRVC(q, 1); in ionic_tx_flush()
104 rte_prefetch0(&q->info[q->tail_idx]); in ionic_tx_flush()
321 q->head_idx = Q_NEXT_TO_POST(q, 1); in ionic_tx_tso_post()
327 struct ionic_queue *q = &txq->qcq.q; in ionic_tx_tso_next() local
340 struct ionic_queue *q = &txq->qcq.q; in ionic_tx_tso() local
463 struct ionic_queue *q = &txq->qcq.q; in ionic_tx() local
524 q->head_idx = Q_NEXT_TO_POST(q, 1); in ionic_tx()
[all …]
ionic_dev.c
340 struct ionic_queue *q = &qcq->q; in ionic_dev_cmd_adminq_init() local
345 .q_init.type = q->type, in ionic_dev_cmd_adminq_init()
419 q->index = index; in ionic_q_init()
420 q->num_descs = num_descs; in ionic_q_init()
421 q->size_mask = num_descs - 1; in ionic_q_init()
422 q->head_idx = 0; in ionic_q_init()
423 q->tail_idx = 0; in ionic_q_init()
431 q->base = base; in ionic_q_map()
432 q->base_pa = base_pa; in ionic_q_map()
438 q->sg_base = base; in ionic_q_sg_map()
[all …]
ionic_lif.c
36 struct ionic_queue *q = &qcq->q; in ionic_qcq_enable() local
42 .type = q->type, in ionic_qcq_enable()
54 struct ionic_queue *q = &qcq->q; in ionic_qcq_disable() local
1340 struct ionic_queue *q = &aqcq->qcq.q; in ionic_lif_adminq_init() local
1353 q->db = ionic_db_map(lif, q); in ionic_lif_adminq_init()
1369 struct ionic_queue *q = &nqcq->qcq.q; in ionic_lif_notifyq_init() local
1399 q->db = NULL; in ionic_lif_notifyq_init()
1475 struct ionic_queue *q = &qcq->q; in ionic_lif_txq_init() local
1508 q->db = ionic_db_map(lif, q); in ionic_lif_txq_init()
1523 struct ionic_queue *q = &qcq->q; in ionic_lif_rxq_init() local
[all …]
ionic_dev.h
215 struct ionic_queue *q);
225 void ionic_q_map(struct ionic_queue *q, void *base, rte_iova_t base_pa);
226 void ionic_q_sg_map(struct ionic_queue *q, void *base, rte_iova_t base_pa);
229 ionic_q_space_avail(struct ionic_queue *q) in ionic_q_space_avail() argument
231 uint16_t avail = q->tail_idx; in ionic_q_space_avail()
233 if (q->head_idx >= avail) in ionic_q_space_avail()
234 avail += q->num_descs - q->head_idx - 1; in ionic_q_space_avail()
236 avail -= q->head_idx + 1; in ionic_q_space_avail()
242 ionic_q_flush(struct ionic_queue *q) in ionic_q_flush() argument
244 uint64_t val = IONIC_DBELL_QID(q->hw_index) | q->head_idx; in ionic_q_flush()
[all …]
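
Note: ionic_q_space_avail() reports how many descriptors can still be posted while always leaving one slot unused, so head_idx == tail_idx can only mean an empty ring. An equivalent formulation of the calculation visible above (illustrative, not the driver's code):

#include <stdint.h>

/* Free descriptors in a ring of `num_descs`, keeping one slot reserved so
 * that head == tail can only mean "empty". */
static inline uint16_t
q_space_avail(uint16_t head, uint16_t tail, uint16_t num_descs)
{
	if (head >= tail)
		return (uint16_t)(num_descs - (head - tail) - 1);
	return (uint16_t)(tail - head - 1);
}
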
/dpdk/drivers/baseband/fpga_lte_fec/
fpga_lte_fec.c
784 q->head_free_desc = q->tail; in fpga_queue_setup()
852 q->tail = q->head_free_desc = 0; in fpga_queue_start()
922 if (q != NULL && q->q_idx == q_idx) in get_queue_id()
1075 q->tail = (q->tail + num_desc) & q->sw_ring_wrap_mask; in fpga_dma_enqueue()
1957 q->head_free_desc - q->tail - 1 : in fpga_enqueue_enc()
1958 q->ring_ctrl_reg.ring_size + q->head_free_desc - q->tail - 1; in fpga_enqueue_enc()
1990 q->head_free_desc, q->tail); in fpga_enqueue_enc()
2027 q->head_free_desc - q->tail - 1 : in fpga_enqueue_dec()
2028 q->ring_ctrl_reg.ring_size + q->head_free_desc - q->tail - 1; in fpga_enqueue_dec()
2060 q->head_free_desc, q->tail); in fpga_enqueue_dec()
[all …]
/dpdk/drivers/baseband/fpga_5gnr_fec/
rte_fpga_5gnr_fec.c
499 q->ring_ctrl_reg.shadow_tail = q->tail; in fpga_queue_setup()
512 q->head_free_desc = q->tail; in fpga_queue_setup()
580 q->tail = q->head_free_desc = 0; in fpga_queue_start()
651 if (q != NULL && q->q_idx == q_idx) in get_queue_id()
804 q->tail = (q->tail + num_desc) & q->sw_ring_wrap_mask; in fpga_dma_enqueue()
1587 q->head_free_desc - q->tail - 1 : in fpga_enqueue_ldpc_enc()
1588 q->ring_ctrl_reg.ring_size + q->head_free_desc - q->tail - 1; in fpga_enqueue_ldpc_enc()
1608 q->head_free_desc, q->tail); in fpga_enqueue_ldpc_enc()
1644 q->head_free_desc - q->tail - 1 : in fpga_enqueue_ldpc_dec()
1645 q->ring_ctrl_reg.ring_size + q->head_free_desc - q->tail - 1; in fpga_enqueue_ldpc_dec()
[all …]
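
Note: the LTE and 5GNR FEC drivers compute free ring space identically: the distance from the software tail up to head_free_desc, minus one reserved descriptor, with ring_size added back when head_free_desc has wrapped behind the tail. A standalone version of that expression (an illustrative helper, taken from neither driver):

#include <stdint.h>

/* Free descriptors between the enqueue tail and the oldest descriptor not
 * yet reclaimed, with one slot kept in reserve. */
static inline uint32_t
fec_ring_free(uint32_t head_free_desc, uint32_t tail, uint32_t ring_size)
{
	return head_free_desc > tail ?
		head_free_desc - tail - 1 :
		ring_size + head_free_desc - tail - 1;
}
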
/dpdk/drivers/baseband/turbo_sw/
bbdev_turbo_software.c
267 if (q != NULL) { in q_release()
271 rte_free(q->ag); in q_release()
276 rte_free(q); in q_release()
295 q = rte_zmalloc_socket(RTE_STR(DRIVER_NAME), sizeof(*q), in q_setup()
297 if (q == NULL) { in q_setup()
475 rte_free(q->ag); in q_setup()
480 rte_free(q); in q_setup()
600 in = q->enc_in; in process_enc_cb()
823 RTE_SET_USED(q); in process_enc_cb()
970 RTE_SET_USED(q); in process_ldpc_enc_cb()
[all …]
/dpdk/drivers/net/fm10k/base/
fm10k_common.c
360 &q->tx_packets); in fm10k_update_hw_stats_tx_q()
365 &q->tx_bytes); in fm10k_update_hw_stats_tx_q()
377 if (q->tx_stats_idx == id_tx) { in fm10k_update_hw_stats_tx_q()
379 q->tx_bytes.count += tx_bytes; in fm10k_update_hw_stats_tx_q()
386 q->tx_stats_idx = id_tx; in fm10k_update_hw_stats_tx_q()
413 &q->rx_drops); in fm10k_update_hw_stats_rx_q()
416 &q->rx_packets); in fm10k_update_hw_stats_rx_q()
421 &q->rx_bytes); in fm10k_update_hw_stats_rx_q()
444 q->rx_stats_idx = id_rx; in fm10k_update_hw_stats_rx_q()
485 q->rx_stats_idx = 0; in fm10k_unbind_hw_stats_q()
[all …]
/dpdk/drivers/baseband/acc100/
rte_acc100_pmd.c
932 q->vf_id, q->qgrp_id, q->aq_id)); in acc100_queue_setup()
937 q->aq_id, q->aq_depth, q->mmio_reg_enqueue); in acc100_queue_setup()
3277 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_enc_cb()
3333 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_ldpc_enc_cb()
3384 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_enc_tb()
3446 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_dec_cb()
3499 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_ldpc_dec_tb()
3532 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_ldpc_dec_cb()
3582 int32_t avail = q->sw_ring_depth + q->sw_ring_tail - q->sw_ring_head; in acc100_enqueue_dec_tb()
3629 (q->aq_dequeued - q->aq_enqueued) / 128; in acc100_enqueue_ldpc_dec()
[all …]
/dpdk/examples/vmdq/
main.c
183 uint16_t q; in port_init() local
283 for (q = 0; q < rxRings; q++) { in port_init()
294 for (q = 0; q < txRings; q++) { in port_init()
326 for (q = 0; q < num_pools; q++) { in port_init()
330 mac.addr_bytes[5] = q; in port_init()
334 q + vmdq_pool_base); in port_init()
475 for (; q < num_queues; q++) { in sighup_handler()
495 uint16_t q, i, p; in lcore_main() local
546 for (q = startQueue; q < endQueue; q++) { in lcore_main()
548 q, buf, buf_size); in lcore_main()
[all …]
/dpdk/examples/vmdq_dcb/
main.c
197 uint16_t q; in port_init() local
309 for (q = 0; q < num_queues; q++) { in port_init()
322 for (q = 0; q < num_queues; q++) { in port_init()
350 for (q = 0; q < num_pools; q++) { in port_init()
355 mac.addr_bytes[5] = q; in port_init()
359 q + vmdq_pool_base); in port_init()
527 for (; q < num_queues; q++) { in sighup_handler()
546 uint16_t q, i, p; in lcore_main() local
585 for (q = startQueue; q < endQueue; q++) { in lcore_main()
587 q, buf, buf_size); in lcore_main()
[all …]
/dpdk/drivers/net/kni/
rte_eth_kni.c
258 struct pmd_queue *q; in eth_kni_rx_queue_setup() local
262 q->mb_pool = mb_pool; in eth_kni_rx_queue_setup()
277 struct pmd_queue *q; in eth_kni_tx_queue_setup() local
301 struct pmd_queue *q; in eth_kni_stats_get() local
306 q = data->rx_queues[i]; in eth_kni_stats_get()
316 q = data->tx_queues[i]; in eth_kni_stats_get()
335 struct pmd_queue *q; in eth_kni_stats_reset() local
340 q->rx.pkts = 0; in eth_kni_stats_reset()
341 q->rx.bytes = 0; in eth_kni_stats_reset()
345 q->tx.pkts = 0; in eth_kni_stats_reset()
[all …]
/dpdk/drivers/net/cnxk/
cnxk_stats.c
116 for (q = 0; q < dev->nb_rxq; q++) { in cnxk_nix_xstats_get()
140 for (q = 0; q < dev->nb_txq; q++) { in cnxk_nix_xstats_get()
173 int roc_size, size, i, q; in cnxk_nix_xstats_get_names() local
192 for (q = 0; q < dev->nb_rxq; q++) { in cnxk_nix_xstats_get_names()
194 "rxq_%d_pkts", q); in cnxk_nix_xstats_get_names()
197 "rxq_%d_octs", q); in cnxk_nix_xstats_get_names()
200 "rxq_%d_drop_pkts", q); in cnxk_nix_xstats_get_names()
206 "rxq_%d_err_pkts", q); in cnxk_nix_xstats_get_names()
210 for (q = 0; q < dev->nb_txq; q++) { in cnxk_nix_xstats_get_names()
212 "txq_%d_pkts", q); in cnxk_nix_xstats_get_names()
[all …]
/dpdk/drivers/baseband/null/
bbdev_null.c
95 if (q != NULL) { in q_release()
96 rte_ring_free(q->processed_pkts); in q_release()
97 rte_free(q); in q_release()
111 struct bbdev_queue *q; in q_setup() local
117 q = rte_zmalloc_socket(RTE_STR(DRIVER_NAME), sizeof(*q), in q_setup()
119 if (q == NULL) { in q_setup()
126 if (q->processed_pkts == NULL) { in q_setup()
131 dev->data->queues[q_id].queue_private = q; in q_setup()
136 rte_free(q); in q_setup()
151 struct bbdev_queue *q = q_data->queue_private; in enqueue_dec_ops() local
[all …]
/dpdk/drivers/net/mvneta/
mvneta_rxtx.c
421 neta_ppio_send(q->priv->ppio, q->queue_id, descs, &nb_pkts); in mvneta_tx_pkt_burst()
549 neta_ppio_send_sg(q->priv->ppio, q->queue_id, descs, &total_descs, in mvneta_tx_sg_pkt_burst()
617 if (unlikely(!q || !q->priv->ppio)) in mvneta_rx_pkt_burst()
620 ret = neta_ppio_recv(q->priv->ppio, q->queue_id, in mvneta_rx_pkt_burst()
647 q->drop_mac++; in mvneta_rx_pkt_burst()
674 ret = mvneta_buffs_alloc(q->priv, q, &buf_to_refill); in mvneta_rx_pkt_burst()
808 if (!q) in mvneta_tx_queue_release()
811 rte_free(q); in mvneta_tx_queue_release()
973 if (!q) in mvneta_rx_queue_release()
981 if (q->priv->ppio) in mvneta_rx_queue_release()
[all …]
/dpdk/drivers/net/octeontx_ep/
otx_ep_ethdev.c
56 unsigned int q; in otx_ep_dev_start() local
67 for (q = 0; q < otx_epvf->nb_rx_queues; q++) { in otx_ep_dev_start()
68 rte_write32(otx_epvf->droq[q]->nb_desc, in otx_ep_dev_start()
69 otx_epvf->droq[q]->pkts_credit_reg); in otx_ep_dev_start()
72 otx_ep_info("OQ[%d] dbells [%d]\n", q, in otx_ep_dev_start()
356 uint32_t num_queues, q; in otx_epdev_exit() local
365 for (q = 0; q < num_queues; q++) { in otx_epdev_exit()
366 if (otx_ep_delete_oqs(otx_epvf, q)) { in otx_epdev_exit()
367 otx_ep_err("Failed to delete OQ:%d\n", q); in otx_epdev_exit()
374 for (q = 0; q < num_queues; q++) { in otx_epdev_exit()
[all …]
/dpdk/lib/sched/
rte_approx.c
56 *q = q_b + k * q_a; in find_exact_solution_left()
68 *q = q_b + k * q_a; in find_exact_solution_right()
142 int rte_approx(double alpha, double d, uint32_t *p, uint32_t *q) in rte_approx() argument
151 if ((p == NULL) || (q == NULL)) { in rte_approx()
205 *q = q_b + k * q_a; in find_exact_solution_left_64()
217 *q = q_b + k * q_a; in find_exact_solution_right_64()
222 uint64_t denum, uint64_t *p, uint64_t *q) in find_best_rational_approximation_64() argument
254 alpha_num, d_num, denum, p, q); in find_best_rational_approximation_64()
280 alpha_num, d_num, denum, p, q); in find_best_rational_approximation_64()
297 int rte_approx_64(double alpha, double d, uint64_t *p, uint64_t *q) in rte_approx_64() argument
[all …]
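
Note: rte_approx() and rte_approx_64() are the public entry points visible here: given a ratio alpha and a precision d, they search for integers p and q whose quotient approximates alpha, with the find_exact_solution_* helpers filling in *q once a bound is found. A usage sketch follows; the header name and the zero-on-success return are assumptions, while the prototypes match the hits above:

#include <stdio.h>
#include <inttypes.h>
#include <rte_approx.h>   /* assumed header for the prototypes shown above */

int
main(void)
{
	uint32_t p, q;

	/* Rational approximation of 1/3 to within 1e-3; a zero return is
	 * assumed to mean success, as the NULL-argument check in the hits
	 * suggests an error code is returned otherwise. */
	if (rte_approx(1.0 / 3.0, 0.001, &p, &q) == 0)
		printf("1/3 ~= %" PRIu32 "/%" PRIu32 "\n", p, q);
	return 0;
}
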
