
Searched refs: q_depth (Results 1 – 17 of 17) sorted by relevance

/f-stack/dpdk/drivers/net/hinic/base/
hinic_pmd_wq.c
48 if (q_depth & (q_depth - 1)) { in hinic_wq_allocate()
55 wq->wq_buf_size = ((u32)q_depth) << wqebb_shift; in hinic_wq_allocate()
56 wq->q_depth = q_depth; in hinic_wq_allocate()
60 q_depth); in hinic_wq_allocate()
72 wq->delta = q_depth; in hinic_wq_allocate()
73 wq->mask = q_depth - 1; in hinic_wq_allocate()
93 if ((wq->delta + num_wqebbs) > wq->q_depth) in hinic_read_wqe()
107 u16 q_depth) in hinic_cmdq_alloc() argument
116 wq[i].q_depth = q_depth; in hinic_cmdq_alloc()
126 wq[i].delta = q_depth; in hinic_cmdq_alloc()
[all …]
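
The hinic_wq_allocate() hits above show the usual power-of-two contract for q_depth: the q_depth & (q_depth - 1) test rejects any depth that is not a power of two, after which mask = q_depth - 1 gives cheap index wrapping and delta starts at q_depth to count free WQEBBs. A minimal standalone sketch of that pattern, using invented toy_* names rather than the driver's:

#include <stdint.h>
#include <stdio.h>

/* Illustrative work-queue bookkeeping, loosely following the
 * hinic_wq_allocate() snippets above; not the driver's real struct. */
struct toy_wq {
	uint32_t wq_buf_size; /* q_depth << wqebb_shift bytes of WQEBBs */
	uint16_t q_depth;     /* ring size; must be a power of two */
	uint16_t mask;        /* q_depth - 1, for cheap index wrapping */
	uint16_t delta;       /* free WQEBBs; q_depth when the queue is empty */
};

int toy_wq_init(struct toy_wq *wq, uint16_t q_depth, uint32_t wqebb_shift)
{
	/* A power of two has exactly one bit set, so x & (x - 1) == 0. */
	if (q_depth == 0 || (q_depth & (q_depth - 1)))
		return -1;

	wq->wq_buf_size = (uint32_t)q_depth << wqebb_shift;
	wq->q_depth = q_depth;
	wq->mask = q_depth - 1;
	wq->delta = q_depth; /* every WQEBB free initially */
	return 0;
}

int main(void)
{
	struct toy_wq wq;

	printf("init(1024): %d\n", toy_wq_init(&wq, 1024, 6)); /* accepted */
	printf("init(1000): %d\n", toy_wq_init(&wq, 1000, 6)); /* rejected */
	return 0;
}
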
hinic_pmd_wq.h
10 #define WQ_SIZE(wq) (u32)((u64)(wq)->q_depth * (wq)->wqebb_size)
98 u16 q_depth; member
119 u16 q_depth);
125 u32 wqebb_shift, u16 q_depth, unsigned int socket_id);
hinic_pmd_cmdq.c
161 return ((wq->delta) == wq->q_depth ? true : false); in hinic_cmdq_idle()
496 errcode_size = wq->q_depth * sizeof(*cmdq->errcode); in init_cmdq()
504 cmd_infos_size = wq->q_depth * sizeof(*cmdq->cmd_infos); in init_cmdq()
801 if (next_prod_idx >= wq->q_depth) { in cmdq_sync_cmd_direct_resp()
803 next_prod_idx -= wq->q_depth; in cmdq_sync_cmd_direct_resp()
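
hinic_cmdq_idle() above declares the command queue idle exactly when delta has climbed back to q_depth, meaning no WQEBBs are outstanding, and cmdq_sync_cmd_direct_resp() wraps the producer index by subtracting q_depth once it runs past the end instead of masking. A hedged sketch of both idioms, with invented names:

#include <stdbool.h>
#include <stdint.h>

struct toy_cmdq {
	uint16_t q_depth;  /* ring size in WQEBBs */
	uint16_t delta;    /* free WQEBBs */
	uint16_t prod_idx; /* producer index, kept in [0, q_depth) */
};

/* Idle means every WQEBB is free again, as in hinic_cmdq_idle(). */
bool toy_cmdq_idle(const struct toy_cmdq *cmdq)
{
	return cmdq->delta == cmdq->q_depth;
}

/* Advance the producer index, wrapping by subtraction rather than
 * masking, matching the cmdq_sync_cmd_direct_resp() lines above. */
uint16_t toy_cmdq_advance(struct toy_cmdq *cmdq, uint16_t num_wqebbs)
{
	uint16_t next_prod_idx = cmdq->prod_idx + num_wqebbs;

	if (next_prod_idx >= cmdq->q_depth)
		next_prod_idx -= cmdq->q_depth;
	cmdq->prod_idx = next_prod_idx;
	cmdq->delta -= num_wqebbs;
	return next_prod_idx;
}
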
/f-stack/freebsd/contrib/ena-com/
ena_eth_com.h
111 return io_sq->q_depth - 1 - cnt; in ena_com_free_q_entries()
210 need_update = unreported_comp > (io_cq->q_depth / ENA_COMP_HEAD_THRESH); in ena_com_update_dev_comp_head()
248 if (unlikely((io_cq->head & (io_cq->q_depth - 1)) == 0)) in ena_com_cq_inc_head()
259 masked_head = io_cq->head & (io_cq->q_depth - 1); in ena_com_tx_comp_req_id_get()
277 if (unlikely(*req_id >= io_cq->q_depth)) { in ena_com_tx_comp_req_id_get()
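
Two ring computations recur in the ena_eth_com.h hits: free space is q_depth - 1 - cnt because one slot is sacrificed so a full ring is distinguishable from an empty one, and the completion head is only reported back to the device once the unreported count exceeds q_depth / ENA_COMP_HEAD_THRESH, batching doorbell writes. A sketch under those assumptions (the threshold value and the next_to_comp name are illustrative, not copied from ena-com):

#include <stdbool.h>
#include <stdint.h>

#define TOY_COMP_HEAD_THRESH 4 /* illustrative divisor */

struct toy_io_sq {
	uint16_t q_depth;
	uint16_t tail;         /* free-running producer counter */
	uint16_t next_to_comp; /* free-running consumer counter */
};

/* Unsigned wraparound makes tail - next_to_comp the occupancy even
 * after the u16 counters overflow; one slot stays reserved so a full
 * ring never looks empty (cf. ena_com_free_q_entries() above). */
uint16_t toy_free_q_entries(const struct toy_io_sq *io_sq)
{
	uint16_t cnt = io_sq->tail - io_sq->next_to_comp;

	return io_sq->q_depth - 1 - cnt;
}

/* Report the new completion head only after enough completions pile
 * up, mirroring the threshold test in ena_com_update_dev_comp_head(). */
bool toy_need_comp_head_update(uint16_t unreported_comp, uint16_t q_depth)
{
	return unreported_comp > (q_depth / TOY_COMP_HEAD_THRESH);
}
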
ena_eth_com.c
43 head_masked = io_cq->head & (io_cq->q_depth - 1); in ena_com_get_next_rx_cdesc()
68 tail_masked = io_sq->tail & (io_sq->q_depth - 1); in get_sq_desc_regular_queue()
83 dst_tail_mask = io_sq->tail & (io_sq->q_depth - 1); in ena_com_write_bounce_buffer_to_dev()
112 if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) in ena_com_write_bounce_buffer_to_dev()
248 if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) in ena_com_sq_update_tail()
257 idx &= (io_cq->q_depth - 1); in ena_com_rx_cdesc_idx_to_ptr()
285 head_masked = io_cq->head & (io_cq->q_depth - 1); in ena_com_cdesc_rx_pkt_get()
582 u16 q_depth = io_cq->q_depth; in ena_com_rx_pkt() local
613 if (unlikely(ena_buf[i].req_id >= q_depth)) in ena_com_rx_pkt()
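
The ena_eth_com.c hits never store wrapped indices: head and tail are free-running u16 counters, and & (q_depth - 1) recovers the ring slot only at the point of use, as in ena_com_rx_cdesc_idx_to_ptr(). The (tail & (q_depth - 1)) == 0 test then doubles as a wrap detector (the real driver flips its phase bit there). A small illustrative sketch:

#include <stdint.h>
#include <stdio.h>

struct toy_cdesc { uint32_t req_id; };

struct toy_io_cq {
	struct toy_cdesc *cdesc_addr;
	uint16_t q_depth; /* power of two */
	uint16_t head;    /* free-running; masked only on access */
};

/* Turn a free-running index into a descriptor pointer, following
 * the masking in ena_com_rx_cdesc_idx_to_ptr(). */
struct toy_cdesc *toy_cdesc_idx_to_ptr(struct toy_io_cq *io_cq, uint16_t idx)
{
	idx &= (io_cq->q_depth - 1);
	return &io_cq->cdesc_addr[idx];
}

/* Advance head; a masked value of 0 means the ring just wrapped
 * (where the real driver toggles its phase bit). */
void toy_cq_inc_head(struct toy_io_cq *io_cq)
{
	io_cq->head++;
	if ((io_cq->head & (io_cq->q_depth - 1)) == 0)
		printf("wrapped at head=%u\n", io_cq->head);
}
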
ena_com.c
181 aenq->head = aenq->q_depth; in ena_com_admin_init_aenq()
220 command_id, admin_queue->q_depth); in get_comp_ctxt()
255 queue_size_mask = admin_queue->q_depth - 1; in __ena_com_submit_admin_cmd()
261 if (cnt >= admin_queue->q_depth) { in __ena_com_submit_admin_cmd()
317 for (i = 0; i < admin_queue->q_depth; i++) { in ena_com_init_comp_ctxt()
1307 create_cmd.sq_depth = io_sq->q_depth; in ena_com_create_io_sq()
1454 create_cmd.cq_depth = io_cq->q_depth; in ena_com_create_io_cq()
1587 u16 depth = ena_dev->aenq.q_depth; in ena_com_admin_aenq_enable()
1748 size = ADMIN_AENQ_SIZE(aenq->q_depth); in ena_com_admin_destroy()
1946 io_cq->q_depth = ctx->queue_size; in ena_com_create_io_queue()
[all …]
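
Two admin-path idioms appear in the ena_com.c hits: the AENQ head counter is initialized to q_depth rather than 0 (its masked position is still slot 0, but the non-zero counter keeps the phase bookkeeping consistent), and command submission treats the queue as full once the outstanding count reaches q_depth, refusing new commands instead of overwriting completion contexts. A sketch of the occupancy check only, with invented names and a simplified outstanding counter:

#include <errno.h>
#include <stdint.h>

struct toy_admin_queue {
	uint16_t q_depth;
	uint16_t sq_tail; /* free-running producer counter */
	uint16_t sq_head; /* free-running consumer counter */
};

/* Refuse a new command when the ring is full, mirroring the
 * cnt >= admin_queue->q_depth test in __ena_com_submit_admin_cmd(). */
int toy_submit_admin_cmd(struct toy_admin_queue *aq)
{
	uint16_t cnt = aq->sq_tail - aq->sq_head; /* outstanding commands */

	if (cnt >= aq->q_depth)
		return -ENOSPC; /* full; the caller must retry later */

	/* ... the real driver writes the command at
	 * sq_tail & (q_depth - 1) and rings the doorbell here ... */
	aq->sq_tail++;
	return 0;
}
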
ena_com.h
143 u16 q_depth; member
188 u16 q_depth; member
240 u16 q_depth; member
271 u16 q_depth; member
ena_plat.h
245 for (i = 0; i < admin_queue->q_depth; i++) { \
/f-stack/dpdk/drivers/net/ena/base/
ena_eth_com.h
83 return io_sq->q_depth - 1 - cnt; in ena_com_free_q_entries()
179 need_update = unreported_comp > (io_cq->q_depth / ENA_COMP_HEAD_THRESH); in ena_com_update_dev_comp_head()
216 if (unlikely((io_cq->head & (io_cq->q_depth - 1)) == 0)) in ena_com_cq_inc_head()
227 masked_head = io_cq->head & (io_cq->q_depth - 1); in ena_com_tx_comp_req_id_get()
245 if (unlikely(*req_id >= io_cq->q_depth)) { in ena_com_tx_comp_req_id_get()
ena_com.c
86 u16 size = ADMIN_SQ_SIZE(queue->q_depth); in ena_com_admin_init_sq()
108 u16 size = ADMIN_CQ_SIZE(queue->q_depth); in ena_com_admin_init_cq()
143 aenq->head = aenq->q_depth; in ena_com_admin_init_aenq()
181 command_id, queue->q_depth); in get_comp_ctxt()
220 if (cnt >= admin_queue->q_depth) { in __ena_com_submit_admin_cmd()
275 for (i = 0; i < queue->q_depth; i++) { in ena_com_init_comp_ctxt()
1257 create_cmd.sq_depth = io_sq->q_depth; in ena_com_create_io_sq()
1402 create_cmd.cq_depth = io_cq->q_depth; in ena_com_create_io_cq()
1535 u16 depth = ena_dev->aenq.q_depth; in ena_com_admin_aenq_enable()
1884 io_cq->q_depth = ctx->queue_size; in ena_com_create_io_queue()
[all …]
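
The DPDK copy of ena_com.c sizes the admin rings directly from q_depth: ADMIN_SQ_SIZE, ADMIN_CQ_SIZE and ADMIN_AENQ_SIZE reduce to depth times the entry size, and the resulting byte count feeds the DMA allocation. A hedged sketch of that sizing step; the 64-byte entry layout and macro body are assumptions matching the usage above, not copied from ena-com:

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

/* Placeholder entry layout; the real admin entries are
 * hardware-defined structs. */
struct toy_aq_entry { uint8_t raw[64]; };

/* Ring byte size as depth * entry size, as the
 * ADMIN_SQ_SIZE(queue->q_depth) usage above suggests. */
#define TOY_ADMIN_SQ_SIZE(depth) ((depth) * sizeof(struct toy_aq_entry))

int toy_admin_init_sq(uint16_t q_depth, struct toy_aq_entry **entries)
{
	size_t size = TOY_ADMIN_SQ_SIZE(q_depth);

	/* The driver allocates DMA-coherent memory here; plain calloc()
	 * stands in for it in this sketch. */
	*entries = calloc(1, size);
	return *entries != NULL ? 0 : -1;
}
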
ena_eth_com.c
15 head_masked = io_cq->head & (io_cq->q_depth - 1); in ena_com_get_next_rx_cdesc()
40 tail_masked = io_sq->tail & (io_sq->q_depth - 1); in get_sq_desc_regular_queue()
55 dst_tail_mask = io_sq->tail & (io_sq->q_depth - 1); in ena_com_write_bounce_buffer_to_dev()
82 if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) in ena_com_write_bounce_buffer_to_dev()
213 if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) in ena_com_sq_update_tail()
222 idx &= (io_cq->q_depth - 1); in ena_com_rx_cdesc_idx_to_ptr()
250 head_masked = io_cq->head & (io_cq->q_depth - 1); in ena_com_cdesc_rx_pkt_get()
ena_com.h
115 u16 q_depth; member
160 u16 q_depth; member
212 u16 q_depth; member
243 u16 q_depth; member
/f-stack/dpdk/drivers/net/hinic/
hinic_pmd_rx.c
311 cqe_mem_size = sizeof(struct hinic_rq_cqe) * rxq->q_depth; in hinic_rx_alloc_cqe()
328 cqe_mem_size = sizeof(struct hinic_rq_cqe) * rxq->q_depth; in hinic_rx_free_cqe()
344 for (i = 0; i < rxq->q_depth; i++) { in hinic_rx_fill_wqe()
368 rx_info_sz = rxq->q_depth * sizeof(*rxq->rx_info); in hinic_setup_rx_resources()
381 if (pkts != rxq->q_depth) { in hinic_setup_rx_resources()
746 while (free_wqebbs++ < rxq->q_depth) { in hinic_free_all_rx_mbufs()
918 exp_wqebbs = rxq->q_depth - pi; in hinic_rearm_rxq_mbuf()
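
On the hinic RX path, q_depth drives every per-queue allocation: the CQE and rx_info arrays are sized as q_depth times the entry size, hinic_setup_rx_resources() expects to post exactly q_depth mbufs, and rearming fills from the current producer index to the end of the ring (exp_wqebbs = q_depth - pi). A compact sketch of that arithmetic with placeholder types:

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

struct toy_rq_cqe  { uint8_t raw[32]; }; /* placeholder CQE layout */
struct toy_rx_info { void *mbuf; };

struct toy_rxq {
	uint16_t q_depth;
	struct toy_rq_cqe *cqe;
	struct toy_rx_info *rx_info;
};

/* Size the per-queue arrays from q_depth, as hinic_rx_alloc_cqe()
 * and hinic_setup_rx_resources() do above. */
int toy_setup_rx_resources(struct toy_rxq *rxq)
{
	size_t cqe_mem_size = sizeof(struct toy_rq_cqe) * rxq->q_depth;
	size_t rx_info_sz = rxq->q_depth * sizeof(*rxq->rx_info);

	rxq->cqe = calloc(1, cqe_mem_size);
	rxq->rx_info = calloc(1, rx_info_sz);
	return (rxq->cqe && rxq->rx_info) ? 0 : -1;
}

/* Refill only up to the end of the ring in one pass, mirroring
 * exp_wqebbs = q_depth - pi in hinic_rearm_rxq_mbuf(). */
uint16_t toy_rearm_count(const struct toy_rxq *rxq, uint16_t pi)
{
	return rxq->q_depth - pi;
}
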
hinic_pmd_rx.h
83 u16 q_depth; member
hinic_pmd_tx.h
117 u16 q_depth; member
hinic_pmd_tx.c
191 #define HINIC_IS_SQ_EMPTY(txq) (((txq)->wq->delta) == ((txq)->q_depth))
691 if (unlikely(end_pi >= txq->q_depth)) { in hinic_get_sq_wqe()
696 if (unlikely(end_pi > txq->q_depth)) { in hinic_get_sq_wqe()
698 remain_wqebbs = txq->q_depth - cur_pi; in hinic_get_sq_wqe()
1224 while (free_wqebbs < txq->q_depth) { in hinic_free_all_tx_mbufs()
1281 tx_info_sz = txq->q_depth * sizeof(*txq->tx_info); in hinic_setup_tx_resources()
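
hinic_get_sq_wqe() has to handle a multi-WQEBB request that would run off the end of the ring: when end_pi exceeds q_depth, only remain_wqebbs = q_depth - cur_pi slots are contiguous and the request has to be split across the wrap. A simplified sketch of that boundary arithmetic:

#include <stdint.h>

/* How many of num_wqebbs fit contiguously starting at cur_pi,
 * following the end_pi / remain_wqebbs logic in hinic_get_sq_wqe().
 * A result smaller than num_wqebbs means the request wraps. */
uint16_t toy_contig_wqebbs(uint16_t q_depth, uint16_t cur_pi,
			   uint16_t num_wqebbs)
{
	uint16_t end_pi = cur_pi + num_wqebbs;

	if (end_pi > q_depth)
		return q_depth - cur_pi; /* contiguous tail before the wrap */
	return num_wqebbs; /* fits without wrapping */
}
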
hinic_pmd_ethdev.c
465 rxq->q_depth = rq_depth; in hinic_rx_queue_setup()
471 rxq->rxinfo_align_end = rxq->q_depth - rxq->rx_free_thresh; in hinic_rx_queue_setup()
512 rxq->wq->delta = rxq->q_depth; in hinic_reset_rx_queue()
513 rxq->wq->mask = rxq->q_depth - 1; in hinic_reset_rx_queue()
604 txq->q_depth = sq_depth; in hinic_tx_queue_setup()
657 txq->wq->delta = txq->q_depth; in hinic_reset_tx_queue()
658 txq->wq->mask = txq->q_depth - 1; in hinic_reset_tx_queue()
2263 qinfo->nb_desc = rxq->q_depth; in hinic_rxq_info_get()
2271 qinfo->nb_desc = txq->q_depth; in hinic_txq_info_get()
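
Finally, hinic_pmd_ethdev.c shows the q_depth lifecycle: queue setup records the configured depth, reset restores delta = q_depth (all WQEBBs free) and mask = q_depth - 1, and the queue-info callbacks report the depth back as nb_desc. A small sketch of the reset-and-report step, with invented struct names:

#include <stdint.h>

struct toy_wq_state { uint16_t delta, mask; };

struct toy_queue {
	uint16_t q_depth;
	struct toy_wq_state *wq;
};

/* Return the queue to its empty state, as hinic_reset_rx_queue()
 * and hinic_reset_tx_queue() do above. */
void toy_reset_queue(struct toy_queue *q)
{
	q->wq->delta = q->q_depth;    /* empty: every WQEBB free */
	q->wq->mask = q->q_depth - 1; /* valid since depth is a power of two */
}

/* Report the configured depth as the descriptor count, mirroring
 * qinfo->nb_desc = rxq->q_depth in hinic_rxq_info_get(). */
uint16_t toy_queue_nb_desc(const struct toy_queue *q)
{
	return q->q_depth;
}
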