Searched refs:mcq (Results 1 – 9 of 9) sorted by relevance
611 rxq->mcq.buf = dv_cq.buf.buf; in mlx4_rxq_attach()
612 rxq->mcq.cqe_cnt = dv_cq.cqe_cnt; in mlx4_rxq_attach()
613 rxq->mcq.set_ci_db = dv_cq.set_ci_db; in mlx4_rxq_attach()
614 rxq->mcq.cqe_64 = (dv_cq.cqe_size & 64) ? 1 : 0; in mlx4_rxq_attach()
615 rxq->mcq.arm_db = dv_cq.arm_db; in mlx4_rxq_attach()
616 rxq->mcq.arm_sn = dv_cq.arm_sn; in mlx4_rxq_attach()
617 rxq->mcq.cqn = dv_cq.cqn; in mlx4_rxq_attach()
618 rxq->mcq.cq_uar = dv_cq.cq_uar; in mlx4_rxq_attach()
619 rxq->mcq.cq_db_reg = (uint8_t *)dv_cq.cq_uar + MLX4_CQ_DOORBELL; in mlx4_rxq_attach()
657 memset(&rxq->mcq, 0, sizeof(rxq->mcq)); in mlx4_rxq_detach()
58 struct mlx4_cq mcq; /**< Info for directly manipulating the CQ. */ member
99 struct mlx4_cq mcq; /**< Info for directly manipulating the CQ. */ member
225 struct mlx4_cq *cq = &rxq->mcq; in mlx4_arm_cq()
344 rxq->mcq.arm_sn++; in mlx4_rx_intr_disable()
308 struct mlx4_cq *cq = &txq->mcq; in mlx4_txq_complete()
1185 struct mlx4_cq *cq = &rxq->mcq; in mlx4_cq_poll_one()
1339 *rxq->mcq.set_ci_db = in mlx4_rx_burst()
1340 rte_cpu_to_be_32(rxq->mcq.cons_index & MLX4_CQ_DB_CI_MASK); in mlx4_rx_burst()
210 struct mlx4_cq *cq = &txq->mcq; in mlx4_txq_fill_dv_obj_info()
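The mlx4 matches above trace the PMD's direct-CQ bookkeeping: mlx4_rxq_attach() copies the mlx4dv-exported CQ attributes into rxq->mcq, mlx4_rxq_detach() zeroes that info, and mlx4_rx_burst() publishes the masked consumer index through set_ci_db. Below is a minimal, self-contained sketch of that flow; the struct shapes, the doorbell mask value, and every my_* name are assumptions reconstructed from the matches, and the real driver uses rte_cpu_to_be_32() and the mlx4dv/ibv types rather than the stand-ins here.

#include <endian.h>
#include <stdint.h>
#include <string.h>

/* Assumed, reduced stand-ins for mlx4dv_cq and struct mlx4_cq (not the real layouts). */
struct my_dv_cq {
	void *buf;
	uint32_t cqe_cnt;
	volatile uint32_t *set_ci_db;
	uint32_t cqn;
};
struct my_cq {
	void *buf;
	uint32_t cqe_cnt;
	volatile uint32_t *set_ci_db;
	uint32_t cqn;
	uint32_t cons_index;
};

#define MY_CQ_DB_CI_MASK 0xffffffu /* assumption: 24-bit consumer-index field */

/* Attach: mirror the verbs-exported CQ info so the data path can drive the CQ directly. */
static void my_rxq_attach(struct my_cq *mcq, const struct my_dv_cq *dv)
{
	mcq->buf = dv->buf;
	mcq->cqe_cnt = dv->cqe_cnt;
	mcq->set_ci_db = dv->set_ci_db;
	mcq->cqn = dv->cqn;
	mcq->cons_index = 0;
}

/* Detach: forget the direct-access info, as the memset() match above does. */
static void my_rxq_detach(struct my_cq *mcq)
{
	memset(mcq, 0, sizeof(*mcq));
}

/* Rx burst tail: publish the masked consumer index through the doorbell record
 * (the driver itself uses rte_cpu_to_be_32(); htobe32() is a header-free stand-in). */
static void my_update_ci(struct my_cq *mcq, uint32_t consumed)
{
	mcq->cons_index += consumed;
	*mcq->set_ci_db = htobe32(mcq->cons_index & MY_CQ_DB_CI_MASK);
}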
74 volatile struct mlx5_mini_cqe8 *mcq = (void *)&(cq + 1)->pkt_info; in rxq_cq_decompress_v() local
135 uint8_t *p = (void *)&mcq[pos % 8]; in rxq_cq_decompress_v()
188 :[mcq]"r"(p), in rxq_cq_decompress_v()
237 : [mcq]"r"(p), in rxq_cq_decompress_v()
271 mcq[pos % 8].hdr_type; in rxq_cq_decompress_v()
273 mcq[pos % 8 + 1].hdr_type; in rxq_cq_decompress_v()
275 mcq[pos % 8 + 2].hdr_type; in rxq_cq_decompress_v()
277 mcq[pos % 8 + 3].hdr_type; in rxq_cq_decompress_v()
360 mcq = (void *)&(cq + pos)->pkt_info; in rxq_cq_decompress_v()
79 volatile struct mlx5_mini_cqe8 *mcq = (void *)&(cq + 1)->pkt_info; in rxq_cq_decompress_v() local
162 (signed int const *)&mcq[pos % 8]); in rxq_cq_decompress_v()
164 (signed int const *)&mcq[pos % 8 + 2]); in rxq_cq_decompress_v()
369 mcq[pos % 8].hdr_type; in rxq_cq_decompress_v()
371 mcq[pos % 8 + 1].hdr_type; in rxq_cq_decompress_v()
373 mcq[pos % 8 + 2].hdr_type; in rxq_cq_decompress_v()
375 mcq[pos % 8 + 3].hdr_type; in rxq_cq_decompress_v()
494 mcq = (void *)&(cq + pos)->pkt_info; in rxq_cq_decompress_v()
76 volatile struct mlx5_mini_cqe8 *mcq = (void *)(cq + 1); in rxq_cq_decompress_v() local
138 mcqe1 = _mm_loadu_si128((__m128i *)&mcq[pos % 8]); in rxq_cq_decompress_v()
139 mcqe2 = _mm_loadu_si128((__m128i *)&mcq[pos % 8 + 2]); in rxq_cq_decompress_v()
350 mcq = (void *)(cq + pos); in rxq_cq_decompress_v()
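The three rxq_cq_decompress_v() hits above are apparently the NEON, Altivec, and SSE variants of the same mlx5 routine: mcq aliases the 8-byte mini-CQEs stored right behind the title CQE of a compressed session, entries are read as mcq[pos % 8], and mcq is rebased back into the CQE ring every eight entries. The sketch below mirrors only that indexing; my_cqe and my_mini_cqe8 are assumed, reduced stand-ins, not the PMD's real definitions.

#include <stdint.h>

/* Assumed stand-ins: a 64-byte CQE slot and an 8-byte mini-CQE (real layouts differ). */
struct my_cqe { uint8_t pkt_info; uint8_t pad[63]; };
struct my_mini_cqe8 { uint8_t hdr_type; uint8_t rest[7]; };

/* Walk 'mcqe_n' compressed completions that follow the title CQE at 'cq',
 * using the same mcq[pos % 8] indexing and cq-relative rebase as the matches. */
static void walk_mini_cqes(volatile struct my_cqe *cq, unsigned int mcqe_n)
{
	volatile struct my_mini_cqe8 *mcq =
		(volatile struct my_mini_cqe8 *)&(cq + 1)->pkt_info;
	unsigned int pos;

	for (pos = 0; pos < mcqe_n; ++pos) {
		uint8_t hdr = mcq[pos % 8].hdr_type;

		(void)hdr; /* a real decompressor folds this into the rebuilt CQE/mbuf */
		/* After eight mini-CQEs, point mcq at the next array in the ring. */
		if (((pos + 1) % 8) == 0 && pos + 1 < mcqe_n)
			mcq = (volatile struct my_mini_cqe8 *)
			      &(cq + pos + 1)->pkt_info;
	}
}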
356 struct dpaa2_queue *mc_q, *mcq; in dpaa2_alloc_rx_tx_queues() local
426 mcq = (struct dpaa2_queue *)priv->rx_vq[vq_id]; in dpaa2_alloc_rx_tx_queues()
427 mcq->tc_index = dist_idx / num_rxqueue_per_tc; in dpaa2_alloc_rx_tx_queues()
428 mcq->flow_id = dist_idx % num_rxqueue_per_tc; in dpaa2_alloc_rx_tx_queues()
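The dpaa2 match above derives each Rx queue's traffic-class position from a flat distribution index. A small sketch of that arithmetic follows; the queue struct is reduced to the two fields the matches assign, and my_dpaa2_queue plus the example queue counts are assumptions, not the driver's real definitions.

#include <stdint.h>
#include <stdio.h>

/* Assumed stand-in for struct dpaa2_queue: only the two fields the matches assign. */
struct my_dpaa2_queue {
	uint16_t tc_index; /* traffic class the queue belongs to */
	uint16_t flow_id;  /* position of the queue within that traffic class */
};

int main(void)
{
	const unsigned int num_rxqueue_per_tc = 4; /* example value */
	struct my_dpaa2_queue q[8];
	unsigned int dist_idx;

	/* Flat index -> (traffic class, flow) pair, as in dpaa2_alloc_rx_tx_queues(). */
	for (dist_idx = 0; dist_idx < 8; ++dist_idx) {
		q[dist_idx].tc_index = dist_idx / num_rxqueue_per_tc;
		q[dist_idx].flow_id = dist_idx % num_rxqueue_per_tc;
		printf("rxq %u -> tc %u flow %u\n", dist_idx,
		       q[dist_idx].tc_index, q[dist_idx].flow_id);
	}
	return 0;
}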