/f-stack/dpdk/drivers/net/mlx4/
mlx4_rxtx.h:
     50  struct mlx4_mr_ctrl mr_ctrl; /* MR control descriptor. */  [member]
    106  struct mlx4_mr_ctrl mr_ctrl; /* MR control descriptor. */  [member]
    168  void mlx4_mr_flush_local_cache(struct mlx4_mr_ctrl *mr_ctrl);
    207  struct mlx4_mr_ctrl *mr_ctrl = &rxq->mr_ctrl;  [in mlx4_rx_addr2mr(), local]
    211  lkey = mlx4_mr_lookup_cache(mr_ctrl->cache, &mr_ctrl->mru,  [in mlx4_rx_addr2mr()]
    235  struct mlx4_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx4_tx_mb2mr(), local]
    240  if (unlikely(*mr_ctrl->dev_gen_ptr != mr_ctrl->cur_gen))  [in mlx4_tx_mb2mr()]
    241  mlx4_mr_flush_local_cache(mr_ctrl);  [in mlx4_tx_mb2mr()]
    243  lkey = mlx4_mr_lookup_cache(mr_ctrl->cache, &mr_ctrl->mru,  [in mlx4_tx_mb2mr()]
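
Taken together, the mlx4_rx_addr2mr()/mlx4_tx_mb2mr() hits above outline the per-queue fast path: compare the queue's cached generation number against the device-wide counter, flush the local cache if the device has changed, then probe a small linear cache starting at its most-recently-used slot. The standalone sketch below only illustrates that shape; the struct layout, the cache size and the helper names (cache_entry, lookup_linear, tx_addr2lkey, CACHE_N, LKEY_INVALID) are assumptions, not the driver's actual definitions.

    #include <stdint.h>
    #include <string.h>

    #define CACHE_N 8               /* assumed linear-cache size (stand-in for MLX4_MR_CACHE_N) */
    #define LKEY_INVALID UINT32_MAX /* assumed "miss" marker */

    /* Illustrative cache entry: a registered address range and its lkey. */
    struct cache_entry {
        uintptr_t start;
        uintptr_t end;
        uint32_t lkey;
    };

    /* Illustrative per-queue MR control block, modeled on the mr_ctrl fields seen above. */
    struct mr_ctrl {
        uint32_t *dev_gen_ptr;              /* points at the device-wide generation counter */
        uint32_t cur_gen;                   /* generation this queue's cache reflects */
        uint16_t mru;                       /* most-recently-used slot, probed first */
        struct cache_entry cache[CACHE_N];  /* linear (top-half) cache */
    };

    /* Drop stale translations and resync with the device generation. */
    static void flush_local_cache(struct mr_ctrl *ctl)
    {
        memset(ctl->cache, 0, sizeof(ctl->cache));
        ctl->mru = 0;
        ctl->cur_gen = *ctl->dev_gen_ptr;
    }

    /* Linear lookup starting at the MRU slot; returns LKEY_INVALID on a miss. */
    static uint32_t lookup_linear(struct cache_entry *cache, uint16_t *mru, uintptr_t addr)
    {
        for (unsigned int n = 0; n < CACHE_N; n++) {
            unsigned int i = (*mru + n) % CACHE_N;

            if (cache[i].start <= addr && addr < cache[i].end) {
                *mru = (uint16_t)i;         /* remember the hit for the next packet */
                return cache[i].lkey;
            }
        }
        return LKEY_INVALID;
    }

    /* Fast-path translation shaped like mlx4_tx_mb2mr(): generation check, then lookup. */
    static uint32_t tx_addr2lkey(struct mr_ctrl *ctl, uintptr_t addr)
    {
        if (*ctl->dev_gen_ptr != ctl->cur_gen)
            flush_local_cache(ctl);         /* device (de)registered memory: caches are stale */
        /* On a miss (LKEY_INVALID) the real driver falls back to a slower bottom-half lookup. */
        return lookup_linear(ctl->cache, &ctl->mru, addr);
    }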
|
mlx4_mr.c:
   1102  mr_ctrl->mru = mr_ctrl->head;  [in mlx4_mr_addr2mr_bh()]
   1104  mr_ctrl->head = (mr_ctrl->head + 1) % MLX4_MR_CACHE_N;  [in mlx4_mr_addr2mr_bh()]
   1122  struct mlx4_mr_ctrl *mr_ctrl = &rxq->mr_ctrl;  [in mlx4_rx_addr2mr_bh(), local]
   1142  struct mlx4_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx4_tx_addr2mr_bh(), local]
   1187  memset(mr_ctrl->cache, 0, sizeof(mr_ctrl->cache));  [in mlx4_mr_flush_local_cache()]
   1192  mr_ctrl->cur_gen = *mr_ctrl->dev_gen_ptr;  [in mlx4_mr_flush_local_cache()]
   1194  (void *)mr_ctrl, mr_ctrl->cur_gen);  [in mlx4_mr_flush_local_cache()]
   1218  struct mlx4_mr_ctrl *mr_ctrl = data->mr_ctrl;  [in mlx4_mr_update_ext_mp_cb(), local]
   1293  .mr_ctrl = mr_ctrl,  [in mlx4_mr_update_ext_mp()]
   1318  struct mlx4_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx4_tx_update_ext_mp(), local]
   [all …]
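
The two mlx4_mr_addr2mr_bh() hits at 1102 and 1104 give away the replacement policy of the linear cache: an entry resolved by the bottom half is written into the slot at head, that slot becomes the new MRU, and head advances round-robin. A minimal sketch of that policy follows, with the same caveat that the types, names and sizes are illustrative stand-ins.

    #include <stdint.h>

    #define CACHE_N 8   /* stand-in for MLX4_MR_CACHE_N */

    struct cache_entry {
        uintptr_t start, end;
        uint32_t lkey;
    };

    struct mr_ctrl {
        uint16_t mru;                       /* slot probed first on the fast path */
        uint16_t head;                      /* next victim slot */
        struct cache_entry cache[CACHE_N];
    };

    /* Install an entry resolved by the slow (bottom-half) path into the linear cache. */
    static void cache_install(struct mr_ctrl *ctl, struct cache_entry resolved)
    {
        ctl->cache[ctl->head] = resolved;           /* overwrite the current victim */
        ctl->mru = ctl->head;                       /* the fresh entry is the new MRU */
        ctl->head = (ctl->head + 1) % CACHE_N;      /* round-robin victim selection */
    }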
|
mlx4_txq.c:
    475  if (mlx4_mr_btree_init(&txq->mr_ctrl.cache_bh,  [in mlx4_tx_queue_setup()]
    481  txq->mr_ctrl.dev_gen_ptr = &priv->mr.dev_gen;  [in mlx4_tx_queue_setup()]
    524  mlx4_mr_btree_free(&txq->mr_ctrl.cache_bh);  [in mlx4_tx_queue_release()]
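
These three hits cover the per-queue lifecycle: Tx queue setup allocates the bottom-half B-tree cache and points dev_gen_ptr at the device-wide generation counter, and queue release frees the B-tree again. A rough standalone sketch of that wiring follows; btree_init()/btree_free(), the entry size and the surrounding structures are assumed stand-ins for the driver's mlx4_mr_btree_*() helpers and private data, not their real signatures.

    #include <stdint.h>
    #include <stdlib.h>

    /* Illustrative bottom-half cache: a growable sorted table of MR ranges. */
    struct mr_btree {
        void *table;
        uint16_t size;
    };

    struct mr_ctrl {
        uint32_t *dev_gen_ptr;       /* shared, device-wide generation counter */
        uint32_t cur_gen;
        struct mr_btree cache_bh;
    };

    struct dev_priv {                /* stand-in for the device private data */
        uint32_t dev_gen;
    };

    struct txq {
        struct mr_ctrl mr_ctrl;
    };

    static int btree_init(struct mr_btree *bt, uint16_t n)   /* stand-in for mlx4_mr_btree_init() */
    {
        bt->table = calloc(n, sizeof(uint64_t) * 3);          /* entry size is illustrative */
        bt->size = bt->table ? n : 0;
        return bt->table ? 0 : -1;
    }

    static void btree_free(struct mr_btree *bt)               /* stand-in for mlx4_mr_btree_free() */
    {
        free(bt->table);
        bt->table = NULL;
        bt->size = 0;
    }

    /* Queue setup: allocate the B-tree and hook the queue to the device generation. */
    static int tx_queue_setup(struct txq *q, struct dev_priv *priv)
    {
        if (btree_init(&q->mr_ctrl.cache_bh, 256) < 0)
            return -1;
        q->mr_ctrl.dev_gen_ptr = &priv->dev_gen;
        q->mr_ctrl.cur_gen = priv->dev_gen;
        return 0;
    }

    /* Queue release: drop the per-queue bottom-half cache. */
    static void tx_queue_release(struct txq *q)
    {
        btree_free(&q->mr_ctrl.cache_bh);
    }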
|
mlx4_mr.h:
     81  int mlx4_mr_update_mp(struct rte_eth_dev *dev, struct mlx4_mr_ctrl *mr_ctrl,
|
mlx4_rxq.c:
    569  mlx4_mr_update_mp(dev, &rxq->mr_ctrl, rxq->mp);  [in mlx4_rxq_attach()]
    877  if (mlx4_mr_btree_init(&rxq->mr_ctrl.cache_bh,  [in mlx4_rx_queue_setup()]
    939  mlx4_mr_btree_free(&rxq->mr_ctrl.cache_bh);  [in mlx4_rx_queue_release()]
|
mlx4_rxtx.c:
   1310  if (unlikely(mlx4_mr_btree_len(&rxq->mr_ctrl.cache_bh) > 1))  [in mlx4_rx_burst()]
|
/f-stack/dpdk/drivers/net/mlx5/
mlx5_mr.c:
     28  struct mlx5_mr_ctrl *mr_ctrl;  [member]
    174  struct mlx5_mr_ctrl *mr_ctrl = &rxq->mr_ctrl;  [in mlx5_rx_addr2mr_bh(), local]
    178  &priv->sh->share_cache, mr_ctrl, addr,  [in mlx5_rx_addr2mr_bh()]
    198  struct mlx5_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx5_tx_addr2mr_bh(), local]
    202  &priv->sh->share_cache, mr_ctrl, addr,  [in mlx5_tx_addr2mr_bh()]
    254  struct mlx5_mr_ctrl *mr_ctrl = data->mr_ctrl;  [in mlx5_mr_update_ext_mp_cb(), local]
    448  .mr_ctrl = mr_ctrl,  [in mlx5_mr_update_ext_mp()]
    476  struct mlx5_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx5_tx_update_ext_mp(), local]
    509  &priv->sh->share_cache, data->mr_ctrl,  [in mlx5_mr_update_mp_cb()]
    535  .mr_ctrl = mr_ctrl,  [in mlx5_mr_update_mp()]
    [all …]
|
mlx5_rxtx.h:
    145  struct mlx5_mr_ctrl mr_ctrl; /* MR control descriptor. */  [member]
    258  struct mlx5_mr_ctrl mr_ctrl; /* MR control descriptor. */  [member]
    464  void mlx5_mr_flush_local_cache(struct mlx5_mr_ctrl *mr_ctrl);
    563  struct mlx5_mr_ctrl *mr_ctrl = &rxq->mr_ctrl;  [in mlx5_rx_addr2mr(), local]
    567  lkey = mlx5_mr_lookup_lkey(mr_ctrl->cache, &mr_ctrl->mru,  [in mlx5_rx_addr2mr()]
    591  struct mlx5_mr_ctrl *mr_ctrl = &txq->mr_ctrl;  [in mlx5_tx_mb2mr(), local]
    596  if (unlikely(*mr_ctrl->dev_gen_ptr != mr_ctrl->cur_gen))  [in mlx5_tx_mb2mr()]
    597  mlx5_mr_flush_local_cache(mr_ctrl);  [in mlx5_tx_mb2mr()]
    599  lkey = mlx5_mr_lookup_lkey(mr_ctrl->cache, &mr_ctrl->mru,  [in mlx5_tx_mb2mr()]
    766  if (unlikely(mlx5_mr_btree_len(&rxq->mr_ctrl.cache_bh) > 1))  [in mprq_buf_replace()]
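
The mlx5 fast path (563-599) mirrors the mlx4 one almost line for line, with mlx5_mr_lookup_lkey() in place of mlx4_mr_lookup_cache(). The extra element is the mlx5_mr_btree_len(...) > 1 test at 766 (the same test appears in mlx5_rx_burst(), mlx5_rx_replenish_bulk_mbuf() and mlx4_rx_burst()): the apparent intent is that when the bottom-half cache knows about at most one memory region, every Rx buffer shares the lkey programmed at queue setup, so the per-buffer translation can be skipped. The sketch below shows only that shortcut; the rxq layout, the single_lkey field and rx_addr2lkey() are hypothetical, and the real B-tree also keeps a reserved first entry that this simplification glosses over.

    #include <stdint.h>

    struct mr_btree {
        uint16_t len;               /* simplified: number of MRs known to this queue */
    };

    struct mr_ctrl {
        struct mr_btree cache_bh;   /* bottom-half cache */
    };

    struct rxq {
        struct mr_ctrl mr_ctrl;
        uint32_t single_lkey;       /* hypothetical: lkey written into the WQEs at setup */
    };

    /* Hypothetical stand-in for the full per-buffer address-to-lkey translation. */
    static uint32_t rx_addr2lkey(struct rxq *q, uintptr_t addr)
    {
        (void)q;
        (void)addr;
        return 0;                   /* placeholder */
    }

    /* Choose the lkey for one replenished Rx buffer. */
    static uint32_t rx_pick_lkey(struct rxq *q, uintptr_t addr)
    {
        if (q->mr_ctrl.cache_bh.len > 1)    /* several MRs: must translate per buffer */
            return rx_addr2lkey(q, addr);
        return q->single_lkey;              /* single MR: reuse the lkey set at queue setup */
    }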
|
mlx5_mr.h:
     25  int mlx5_mr_update_mp(struct rte_eth_dev *dev, struct mlx5_mr_ctrl *mr_ctrl,
|
mlx5_txq.c:
   1114  if (mlx5_mr_btree_init(&tmpl->txq.mr_ctrl.cache_bh,  [in mlx5_txq_new()]
   1120  tmpl->txq.mr_ctrl.dev_gen_ptr = &priv->sh->share_cache.dev_gen;  [in mlx5_txq_new()]
   1256  mlx5_mr_btree_free(&txq_ctrl->txq.mr_ctrl.cache_bh);  [in mlx5_txq_release()]
|
mlx5_rxtx_vec.c:
    121  if (unlikely(mlx5_mr_btree_len(&rxq->mr_ctrl.cache_bh)  [in mlx5_rx_replenish_bulk_mbuf()]
|
mlx5_trigger.c:
    155  mlx5_mr_update_mp(dev, &rxq_ctrl->rxq.mr_ctrl,  [in mlx5_rxq_start()]
    162  (dev, &rxq_ctrl->rxq.mr_ctrl,  [in mlx5_rxq_start()]
|
mlx5_rxq.c:
   1518  if (mlx5_mr_btree_init(&tmpl->rxq.mr_ctrl.cache_bh,  [in mlx5_rxq_new()]
   1732  tmpl->rxq.mr_ctrl.cache_bh = (struct mlx5_mr_btree) { 0 };  [in mlx5_rxq_hairpin_new()]
   1799  mlx5_mr_btree_free(&rxq_ctrl->rxq.mr_ctrl.cache_bh);  [in mlx5_rxq_release()]
|
mlx5_rxtx.c:
   1469  if (unlikely(mlx5_mr_btree_len(&rxq->mr_ctrl.cache_bh) > 1))  [in mlx5_rx_burst()]
|
/f-stack/dpdk/drivers/common/mlx5/
mlx5_common_mr.c:
    870  struct mlx5_mr_ctrl *mr_ctrl,  [in mr_lookup_caches(), argument]
    936  struct mr_cache_entry *repl = &mr_ctrl->cache[mr_ctrl->head];  [in mlx5_mr_addr2mr_bh()]
    955  mr_ctrl->mru = mr_ctrl->head;  [in mlx5_mr_addr2mr_bh()]
    957  mr_ctrl->head = (mr_ctrl->head + 1) % MLX5_MR_CACHE_N;  [in mlx5_mr_addr2mr_bh()]
   1001  mr_ctrl->mru = 0;  [in mlx5_mr_flush_local_cache()]
   1003  mr_ctrl->head = 0;  [in mlx5_mr_flush_local_cache()]
   1004  memset(mr_ctrl->cache, 0, sizeof(mr_ctrl->cache));  [in mlx5_mr_flush_local_cache()]
   1006  mr_ctrl->cache_bh.len = 1;  [in mlx5_mr_flush_local_cache()]
   1007  mr_ctrl->cache_bh.overflow = 0;  [in mlx5_mr_flush_local_cache()]
   1009  mr_ctrl->cur_gen = *mr_ctrl->dev_gen_ptr;  [in mlx5_mr_flush_local_cache()]
    [all …]
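
The mlx5_mr_flush_local_cache() hits (1001-1009) spell out everything a flush resets: the MRU and head indices, the whole linear cache, the length and overflow flag of the bottom-half B-tree, and finally the generation number. The cache_bh.len = 1 assignment suggests the B-tree reserves its first entry rather than being emptied to zero. The sketch below mirrors those assignments over stand-in structures; the field names are kept close to the hits, but the layout and types are assumed.

    #include <stdint.h>
    #include <string.h>

    #define MR_CACHE_N 8                 /* stand-in for MLX5_MR_CACHE_N */

    struct cache_entry {
        uintptr_t start, end;
        uint32_t lkey;
    };

    struct mr_btree {                    /* simplified bottom-half cache */
        struct cache_entry *table;
        uint16_t len;                    /* first entry reserved, so len == 1 means "empty" */
        int overflow;                    /* set when the table ran out of room */
    };

    struct mr_ctrl {
        uint32_t *dev_gen_ptr;
        uint32_t cur_gen;
        uint16_t mru, head;
        struct cache_entry cache[MR_CACHE_N];
        struct mr_btree cache_bh;
    };

    /* Discard every per-queue translation and resynchronize with the device,
     * mirroring the mlx5_mr_flush_local_cache() assignments listed above. */
    static void flush_local_cache(struct mr_ctrl *ctl)
    {
        ctl->mru = 0;
        ctl->head = 0;
        memset(ctl->cache, 0, sizeof(ctl->cache));   /* empty the linear (top-half) cache */
        ctl->cache_bh.len = 1;                       /* keep only the reserved first entry */
        ctl->cache_bh.overflow = 0;
        ctl->cur_gen = *ctl->dev_gen_ptr;            /* caches now reflect this generation */
    }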
|
mlx5_common_mr.h:
    135  struct mlx5_mr_ctrl *mr_ctrl,
    144  void mlx5_mr_flush_local_cache(struct mlx5_mr_ctrl *mr_ctrl);
|
/f-stack/dpdk/drivers/regex/mlx5/
mlx5_regex.h:
     53  struct mlx5_mr_ctrl mr_ctrl;  [member]
|
mlx5_regex_control.c:
    373  ret = mlx5_mr_btree_init(&qp->mr_ctrl.cache_bh, MLX5_MR_BTREE_CACHE_N,  [in mlx5_regex_qp_setup()]
    388  mlx5_mr_btree_free(&qp->mr_ctrl.cache_bh);  [in mlx5_regex_qp_setup()]
|
mlx5_regex_fastpath.c:
    110  &priv->mr_scache, &qp->mr_ctrl,  [in prep_one()]
|