/dpdk/drivers/mempool/cnxk/

  cn10k_mempool_ops.c
    64: batch_op_data_get(uint64_t pool_id)  in batch_op_data_get() (argument)
    85: op_data = batch_op_data_get(mp->pool_id);  in batch_op_init()
    99: batch_op_data_set(mp->pool_id, op_data);  in batch_op_init()
    111: op_data = batch_op_data_get(mp->pool_id);  in batch_op_fini()
    123: roc_npa_aura_op_bulk_free(mp->pool_id, mem->objs,  in batch_op_fini()
    130: batch_op_data_set(mp->pool_id, NULL);  in batch_op_fini()
    148: roc_npa_aura_op_free(mp->pool_id, 1, ptr[0]);  in cn10k_mempool_enq()
    152: op_data = batch_op_data_get(mp->pool_id);  in cn10k_mempool_enq()
    167: op_data = batch_op_data_get(mp->pool_id);  in cn10k_mempool_get_count()
    195: op_data = batch_op_data_get(mp->pool_id);  in cn10k_mempool_deq()
    [all …]

  cnxk_mempool_ops.c
    21: roc_npa_aura_op_free(mp->pool_id, 0,  in cnxk_mempool_enq()
    38: obj = roc_npa_aura_op_alloc(mp->pool_id, 0);  in cnxk_mempool_deq()
    54: return (unsigned int)roc_npa_aura_op_available(mp->pool_id);  in cnxk_mempool_get_count()
    111: mp->pool_id = aura_handle;  in cnxk_mempool_alloc()
    125: plt_npa_dbg("aura_handle=0x%" PRIx64, mp->pool_id);  in cnxk_mempool_free()
    126: rc = roc_npa_pool_destroy(mp->pool_id);  in cnxk_mempool_free()
    163: roc_npa_aura_op_range_set(mp->pool_id, iova,  in cnxk_mempool_populate()
    166: if (roc_npa_pool_range_update_check(mp->pool_id) < 0)  in cnxk_mempool_populate()

  cn9k_mempool_ops.c
    17: roc_npa_aura_op_bulk_free(mp->pool_id, (const uint64_t *)obj_table, n,  in cn9k_mempool_enq()
    28: count = roc_npa_aura_op_bulk_alloc(mp->pool_id, (uint64_t *)obj_table,  in cn9k_mempool_deq()

  cnxk_mempool_telemetry.c
    28: aura_id = roc_npa_aura_handle_to_aura(mp->pool_id);  in mempool_info_cb()

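Read together, the cnxk mempool entries show the driver-side contract for pool_id: cnxk_mempool_alloc() stores the NPA aura handle in mp->pool_id, and every later operation (enqueue, dequeue, count, populate, free) hands the same handle back to the roc_npa_* layer. The sketch below illustrates that flow. It is a minimal illustration built only from the calls visible in the listing; the roc_* functions come from the internal cnxk ROC header (roc_api.h), and the error handling is simplified, so treat it as a reading aid rather than the actual driver code.

    /*
     * Sketch of the cnxk pattern above: mp->pool_id carries the NPA aura
     * handle, and the mempool ops are thin wrappers around the ROC aura ops.
     * Not the real driver; prototypes follow the calls shown in the listing.
     */
    #include <errno.h>
    #include <stdint.h>
    #include <rte_mempool.h>
    #include "roc_api.h"    /* internal cnxk header, assumed on the include path */

    static int
    sketch_cnxk_enq(struct rte_mempool *mp, void * const *obj_table, unsigned int n)
    {
            unsigned int i;

            /* Return each object to the aura identified by mp->pool_id. */
            for (i = 0; i < n; i++)
                    roc_npa_aura_op_free(mp->pool_id, 0, (uintptr_t)obj_table[i]);

            return 0;
    }

    static int
    sketch_cnxk_deq(struct rte_mempool *mp, void **obj_table, unsigned int n)
    {
            unsigned int i;
            uint64_t obj;

            for (i = 0; i < n; i++) {
                    /* Pop one object from the aura; zero means the aura is empty. */
                    obj = roc_npa_aura_op_alloc(mp->pool_id, 0);
                    if (obj == 0) {
                            /* Give back what was taken so the op stays all-or-nothing. */
                            while (i-- > 0)
                                    roc_npa_aura_op_free(mp->pool_id, 0,
                                                         (uintptr_t)obj_table[i]);
                            return -ENOENT;
                    }
                    obj_table[i] = (void *)(uintptr_t)obj;
            }
            return 0;
    }
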
/dpdk/drivers/mempool/octeontx/

  rte_mempool_octeontx.c
    37: mp->pool_id = (uint64_t)pool;  in octeontx_fpavf_alloc()
    49: pool = (uintptr_t)mp->pool_id;  in octeontx_fpavf_free()
    77: pool = (uintptr_t)mp->pool_id;  in octeontx_fpavf_enqueue()
    94: pool = (uintptr_t)mp->pool_id;  in octeontx_fpavf_dequeue()
    124: pool = (uintptr_t)mp->pool_id;  in octeontx_fpavf_get_count()
    181: gpool = octeontx_fpa_bufpool_gpool(mp->pool_id);  in octeontx_fpavf_populate()
    182: pool_bar = mp->pool_id & ~(uint64_t)FPA_GPOOL_MASK;  in octeontx_fpavf_populate()

/dpdk/drivers/common/cnxk/

  roc_npa.c
    229: pool_id);  in npa_stack_memzone_name()
    236: const char *mz_name = npa_stack_memzone_name(lf, pool_id, name);  in npa_stack_dma_alloc()
    243: npa_stack_dma_free(struct npa_lf *lf, char *name, int pool_id)  in npa_stack_dma_free() (argument)
    268: int rc, aura_id, pool_id, stack_size, alloc_size;  in npa_aura_pool_pair_alloc() (local)
    302: pool_id = aura_id;  in npa_aura_pool_pair_alloc()
    303: rc = (aura_id < 0 || pool_id >= (int)lf->nr_pools ||  in npa_aura_pool_pair_alloc()
    314: mz = npa_stack_dma_alloc(lf, name, pool_id, alloc_size);  in npa_aura_pool_pair_alloc()
    351: pool->err_qint_idx = pool_id % lf->qints;  in npa_aura_pool_pair_alloc()
    453: int aura_id, pool_id, rc;  in npa_aura_pool_pair_free() (local)
    459: pool_id = aura_id;  in npa_aura_pool_pair_free()
    [all …]

  cnxk_telemetry_npa.c
    116: cnxk_tel_npa_pool(int pool_id, struct plt_tel_data *d)  in cnxk_tel_npa_pool() (argument)
    128: if (plt_bitmap_get(lf->npa_bmp, pool_id))  in cnxk_tel_npa_pool()
    137: req->aura_id = pool_id;  in cnxk_tel_npa_pool()
    143: plt_err("Failed to get pool(%d) context", pool_id);  in cnxk_tel_npa_pool()

  roc_nix_fc.c
    322: rox_nix_fc_npa_bp_cfg(struct roc_nix *roc_nix, uint64_t pool_id, uint8_t ena,  in rox_nix_fc_npa_bp_cfg() (argument)
    344: req->aura_id = roc_npa_aura_handle_to_aura(pool_id);  in rox_nix_fc_npa_bp_cfg()
    371: req->aura_id = roc_npa_aura_handle_to_aura(pool_id);  in rox_nix_fc_npa_bp_cfg()
    391: req->aura_id = roc_npa_aura_handle_to_aura(pool_id);  in rox_nix_fc_npa_bp_cfg()

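A recurring detail in the common/cnxk entries is that pool_id (the opaque 64-bit aura handle) is never written into a mailbox request directly; it is first narrowed with roc_npa_aura_handle_to_aura(), as roc_nix_fc.c and cnxk_mempool_telemetry.c do above. A hedged sketch of that conversion step follows; the request struct is a stand-in, not the real mbox layout.

    /*
     * Sketch only: mirrors the pattern in roc_nix_fc.c, where the aura handle
     * held in pool_id is reduced to the hardware aura index before it goes
     * into a mailbox request.  struct bp_req_sketch is hypothetical.
     */
    #include <stdint.h>
    #include "roc_api.h"            /* roc_npa_aura_handle_to_aura() */

    struct bp_req_sketch {
            uint32_t aura_id;       /* hardware aura index, not the 64-bit handle */
    };

    static void
    fill_bp_request(struct bp_req_sketch *req, uint64_t pool_id)
    {
            /* pool_id is the opaque aura handle; the mailbox wants the aura index. */
            req->aura_id = roc_npa_aura_handle_to_aura(pool_id);
    }
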
/dpdk/drivers/net/bnxt/tf_core/

  tf_tcam_shared.c
    348: enum tf_tcam_shared_wc_pool_id pool_id;  in tf_tcam_shared_unbind() (local)
    413: for (pool_id = TF_TCAM_SHARED_WC_POOL_HI;  in tf_tcam_shared_unbind()
    414: pool_id < TF_TCAM_SHARED_WC_POOL_MAX;  in tf_tcam_shared_unbind()
    415: pool_id++) {  in tf_tcam_shared_unbind()
    416: pool = tcam_shared_wc->db[dir][pool_id].pool;  in tf_tcam_shared_unbind()
    417: start = tcam_shared_wc->db[dir][pool_id].info.start;  in tf_tcam_shared_unbind()
    424: tf_pool_2_str(pool_id),  in tf_tcam_shared_unbind()
    436: tf_pool_2_str(pool_id),  in tf_tcam_shared_unbind()
    464: pool_id,  in tf_tcam_shared_unbind()

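In the bnxt TruFlow code, pool_id is not a hardware handle but an enum index (TF_TCAM_SHARED_WC_POOL_HI through TF_TCAM_SHARED_WC_POOL_MAX) used to walk the per-direction WC pool database. The loop shape is sketched below with hypothetical enum and database types standing in for the real tf_tcam_shared structures.

    /*
     * Illustration of the iteration idiom in tf_tcam_shared_unbind() above.
     * All names here are hypothetical stand-ins.
     */
    #include <stdio.h>

    enum wc_pool_id { WC_POOL_HI, WC_POOL_LO, WC_POOL_MAX };

    struct wc_pool_info { int start; };
    struct wc_pool_db { struct wc_pool_info info[2][WC_POOL_MAX]; }; /* [dir][pool_id] */

    static void
    dump_wc_pools(const struct wc_pool_db *db, int dir)
    {
            enum wc_pool_id pool_id;

            /* Same shape as the unbind loop: visit every pool id for one direction. */
            for (pool_id = WC_POOL_HI; pool_id < WC_POOL_MAX; pool_id++)
                    printf("dir %d pool %d start %d\n",
                           dir, (int)pool_id, db->info[dir][pool_id].start);
    }
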
/dpdk/drivers/crypto/caam_jr/

  caam_jr_hw_specific.h
    304: uint8_t pool_id;  (member)
    308: uint8_t pool_id;

/dpdk/drivers/event/cnxk/

  cnxk_eventdev_adptr.c
    194: rq->vwqe_aura_handle = roc_npa_aura_handle_to_aura(vmp->pool_id);  in cnxk_sso_rx_adapter_vwqe_enable()
    251: rxq_sp->qconf.mp->pool_id, true,  in cnxk_sso_rx_adapter_queue_add()
    294: rxq_sp->qconf.mp->pool_id, false,  in cnxk_sso_rx_adapter_queue_del()

  cn10k_worker.h
    149: aura_handle = mbuf->pool->pool_id;  in cn10k_process_vwqe()
    243: roc_npa_aura_op_free(m->pool->pool_id, 0, iova);  in cn10k_sso_hws_post_process()

  cnxk_tim_evdev.c
    60: tim_ring->chunk_pool->pool_id);  in cnxk_tim_chnk_pool_create()

/dpdk/drivers/net/dpaa/fmlib/

  fm_port_ext.h
    2859: uint8_t pool_id, bool enable);
    2929: uint8_t pool_id);
    2947: uint8_t pool_id, uint32_t value);

/dpdk/drivers/crypto/dpaa_sec/

  dpaa_sec.h
    91: uint8_t pool_id;  (member)
    95: uint8_t pool_id;

/dpdk/drivers/net/octeontx/

  octeontx_rxtx.h
    351: octeontx_fpa_bufpool_gaura((uintptr_t)m_tofree->pool->pool_id);  in __octeontx_xmit_prepare()
    407: m_tofree->pool->pool_id);  in __octeontx_xmit_mseg_prepare()

/dpdk/drivers/net/cnxk/

  cn9k_tx.h
    171: send_hdr->w0.aura = roc_npa_aura_handle_to_aura(m->pool->pool_id);  in cn9k_nix_xmit_prepare()
    1126: offsetof(struct rte_mempool, pool_id));  in cn9k_nix_xmit_pkts_vector()
    1128: offsetof(struct rte_mempool, pool_id));  in cn9k_nix_xmit_pkts_vector()
    1130: offsetof(struct rte_mempool, pool_id));  in cn9k_nix_xmit_pkts_vector()
    1132: offsetof(struct rte_mempool, pool_id));  in cn9k_nix_xmit_pkts_vector()

  cnxk_ethdev.c
    535: if (mp->pool_id == 0) {  in cnxk_nix_rx_queue_setup()
    552: roc_nix_inl_dev_xaq_realloc(mp->pool_id);  in cnxk_nix_rx_queue_setup()
    567: rq->aura_handle = mp->pool_id;  in cnxk_nix_rx_queue_setup()

  cn10k_tx.h
    626: send_hdr->w0.aura = roc_npa_aura_handle_to_aura(m->pool->pool_id);  in cn10k_nix_xmit_prepare()
    1896: offsetof(struct rte_mempool, pool_id));  in cn10k_nix_xmit_pkts_vector()
    1898: offsetof(struct rte_mempool, pool_id));  in cn10k_nix_xmit_pkts_vector()
    1900: offsetof(struct rte_mempool, pool_id));  in cn10k_nix_xmit_pkts_vector()
    1902: offsetof(struct rte_mempool, pool_id));  in cn10k_nix_xmit_pkts_vector()

  cn10k_ethdev.c
    287: rxq->aura_handle = rxq_sp->qconf.mp->pool_id;  in cn10k_nix_rx_queue_setup()

  cn10k_ethdev_sec.c
    387: roc_npa_aura_op_free(mbuf->pool->pool_id, 1, (rte_iova_t)mbuf);  in cnxk_pktmbuf_free_no_cache()

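On the Tx side, pool_id feeds the send descriptor: the scalar prepare paths in cn9k_tx.h and cn10k_tx.h convert m->pool->pool_id into an aura index for send_hdr->w0.aura, while the vector paths load the field in bulk via offsetof(struct rte_mempool, pool_id). A hedged sketch of the scalar step follows; the send header struct here is a simplified stand-in, not the real NIX descriptor layout.

    /*
     * Sketch of the scalar Tx-prepare step above: when hardware is to free the
     * mbuf after transmit, the send header carries the aura derived from the
     * mbuf pool's pool_id.  The header layout is simplified for illustration.
     */
    #include <stdint.h>
    #include <rte_mbuf.h>
    #include "roc_api.h"            /* roc_npa_aura_handle_to_aura() */

    struct send_hdr_sketch {        /* stand-in for the NIX send header word */
            uint32_t aura;
    };

    static void
    fill_tx_aura(struct send_hdr_sketch *send_hdr, const struct rte_mbuf *m)
    {
            /* m->pool->pool_id is the NPA aura handle set by the cnxk mempool driver. */
            send_hdr->aura = roc_npa_aura_handle_to_aura(m->pool->pool_id);
    }
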
/dpdk/drivers/event/octeontx/

  timvf_evdev.c
    170: timr->chunk_pool)->pool_id;  in timvf_ring_start()

/dpdk/doc/guides/sample_app_ug/

  vmdq_forwarding.rst
    91: is assigned to the MAC like 52:54:00:12:<port_id>:<pool_id>, that is,

  vmdq_dcb_forwarding.rst
    115: is assigned to the MAC like 52:54:00:12:<port_id>:<pool_id>, that is,

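Both sample-application guides document the same per-pool MAC convention: 52:54:00:12:<port_id>:<pool_id>. A small sketch of composing such an address is shown below; it mirrors the documented format, not the sample applications' exact code.

    /* Build a MAC of the form 52:54:00:12:<port_id>:<pool_id>, as described
     * in the VMDq guides above. */
    #include <stdint.h>
    #include <rte_ether.h>

    static struct rte_ether_addr
    vmdq_pool_mac(uint8_t port_id, uint8_t pool_id)
    {
            struct rte_ether_addr mac = {
                    .addr_bytes = { 0x52, 0x54, 0x00, 0x12, port_id, pool_id },
            };

            return mac;
    }
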
/dpdk/lib/mempool/

  rte_mempool.h
    213: uint64_t pool_id; /**< External mempool identifier. */  (member)

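The field itself is declared once, in rte_mempool.h: pool_id is an opaque 64-bit identifier that hardware-backed mempool drivers (the cnxk NPA aura handles and octeontx FPA pool pointers listed above) fill in at pool creation and read back in their ops; drivers that do not need it leave it untouched. A minimal consumer-side sketch using only the public mempool API:

    /* Sketch: read back the external (hardware) identifier a mempool driver
     * stored in pool_id when the pool was created. */
    #include <inttypes.h>
    #include <stdio.h>
    #include <rte_mempool.h>

    static void
    print_external_pool_id(const struct rte_mempool *mp)
    {
            printf("mempool %s: external pool_id 0x%" PRIx64 "\n",
                   mp->name, mp->pool_id);
    }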