/f-stack/dpdk/app/test-crypto-perf/
  cperf_test_vectors.c
    461: rte_malloc_virt2iova(t_vec->digest.data);  in cperf_test_vector_get_dummy()
    525: rte_malloc_virt2iova(t_vec->digest.data);  in cperf_test_vector_get_dummy()
    558: t_vec->aad.phys_addr = rte_malloc_virt2iova(t_vec->aad.data);  in cperf_test_vector_get_dummy()
    573: rte_malloc_virt2iova(t_vec->digest.data);  in cperf_test_vector_get_dummy()
  cperf_test_vector_parsing.c
    420: vector->aad.phys_addr = rte_malloc_virt2iova(vector->aad.data);  in parse_entry()
    435: vector->digest.phys_addr = rte_malloc_virt2iova(  in parse_entry()
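
These test-vector hits follow a common DPDK idiom: a buffer is allocated from the rte_malloc heap, the virtual pointer is kept for CPU access, and rte_malloc_virt2iova() resolves the IOVA the crypto device will DMA from. A minimal sketch of that pairing (the struct and field names below are illustrative, loosely modeled on the digest/aad fields in the hits, not the actual cperf types):

    #include <stdint.h>
    #include <rte_malloc.h>
    #include <rte_memory.h>

    /* Illustrative only: pair a CPU-visible pointer with its IOVA. */
    struct vec_buf {
        uint8_t   *data;      /* virtual address, for CPU access */
        rte_iova_t phys_addr; /* IOVA, programmed into the device */
        uint32_t   length;
    };

    static int
    vec_buf_alloc(struct vec_buf *b, uint32_t len)
    {
        b->data = rte_malloc(NULL, len, 16);
        if (b->data == NULL)
            return -1;
        b->phys_addr = rte_malloc_virt2iova(b->data);
        b->length = len;
        if (b->phys_addr == RTE_BAD_IOVA) {
            rte_free(b->data);
            return -1;
        }
        return 0;
    }
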
/f-stack/dpdk/lib/librte_eal/include/
  rte_malloc.h
    562: rte_malloc_virt2iova(const void *addr);
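
The full declaration (the return type sits on the line above the excerpt) is rte_iova_t rte_malloc_virt2iova(const void *addr). It only resolves addresses that live in the rte_malloc heap; anything else yields RTE_BAD_IOVA, which callers should check before handing the value to hardware. A minimal sketch of that check:

    #include <rte_malloc.h>
    #include <rte_memory.h> /* rte_iova_t, RTE_BAD_IOVA */

    /* Sketch: translate only if the pointer came from the rte_malloc heap. */
    static int
    resolve_iova(const void *addr, rte_iova_t *out)
    {
        rte_iova_t iova = rte_malloc_virt2iova(addr);

        if (iova == RTE_BAD_IOVA)
            return -1; /* not an rte_malloc'd address */
        *out = iova;
        return 0;
    }
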
/f-stack/dpdk/drivers/net/ark/
  ark_ethdev_tx.c
    295: queue_base = rte_malloc_virt2iova(queue);  in eth_ark_tx_hw_queue_config()
    296: ring_base = rte_malloc_virt2iova(queue->meta_q);  in eth_ark_tx_hw_queue_config()
  ark_ethdev_rx.c
    77: queue_base = rte_malloc_virt2iova(queue);  in eth_ark_rx_hw_setup()
    81: phys_addr_q_base = rte_malloc_virt2iova(queue->paddress_q);  in eth_ark_rx_hw_setup()
/f-stack/dpdk/drivers/net/sfc/
  sfc_tso.c
    131: header_paddr = rte_malloc_virt2iova((void *)tsoh);  in sfc_efx_tso_do()
  sfc_ef10_tx.c
    952: txq->tsoh_iova = rte_malloc_virt2iova(txq->tsoh);  in sfc_ef10_tx_qcreate()
/f-stack/dpdk/lib/librte_eal/
  rte_eal_exports.def
    90: rte_malloc_virt2iova
  version.map
    123: rte_malloc_virt2iova;
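
These two hits are export lists rather than call sites: the symbol must appear under EXPORTS in the Windows module-definition file and in the ELF version script before shared-library consumers can link against it. The version.map stanza has roughly this shape (the version node name and neighboring symbols are assumptions here and vary by DPDK release):

    DPDK_21 {
        global:

        rte_malloc_virt2iova;
        /* ... other exported EAL symbols ... */

        local: *;
    };
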
/f-stack/dpdk/drivers/compress/qat/
  qat_comp_pmd.c
    172: rte_malloc_virt2iova(cookie->qat_sgl_src_d);  in qat_comp_qp_setup()
    175: rte_malloc_virt2iova(cookie->qat_sgl_dst_d);  in qat_comp_qp_setup()
  qat_comp.c
    234: rte_malloc_virt2iova(cookie->qat_sgl_src_d);  in qat_comp_build_request()
    275: rte_malloc_virt2iova(cookie->qat_sgl_dst_d);  in qat_comp_build_request()
/f-stack/dpdk/lib/librte_eal/common/
  eal_common_memzone.c
    178: mz->iova = rte_malloc_virt2iova(mz_addr);  in memzone_reserve_aligned_thread_unsafe()
  rte_malloc.c
    360: rte_malloc_virt2iova(const void *addr)  (function definition)
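
The memzone hit shows the translation being done once at reservation time and cached in mz->iova, so memzone consumers read a stored field instead of re-translating. A minimal sketch (the zone name and size are arbitrary):

    #include <rte_memzone.h>
    #include <rte_memory.h>

    /* Sketch: rte_memzone_reserve() stores the IOVA at creation, so the
     * device-visible address is a field read, not a lookup. */
    static rte_iova_t
    reserve_and_get_iova(void)
    {
        const struct rte_memzone *mz =
            rte_memzone_reserve("example_zone", 4096, SOCKET_ID_ANY, 0);

        return (mz != NULL) ? mz->iova : RTE_BAD_IOVA;
    }
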
/f-stack/dpdk/examples/fips_validation/
  main.c
    652: sym->auth.digest.phys_addr = rte_malloc_virt2iova(env.digest);  in prepare_auth_op()
    698: sym->aead.digest.phys_addr = rte_malloc_virt2iova(env.digest);  in prepare_aead_op()
    706: sym->aead.digest.phys_addr = rte_malloc_virt2iova(  in prepare_aead_op()
    713: sym->aead.aad.phys_addr = rte_malloc_virt2iova(sym->aead.aad.data);  in prepare_aead_op()
  fips_dev_self_test.c
    1312: sym->aead.aad.phys_addr = rte_malloc_virt2iova(dst);  in prepare_aead_op()
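
In the FIPS example the translation fills the phys_addr half of a crypto op's digest and aad descriptors, mirroring the test-crypto-perf hits earlier. A hedged sketch of the auth-digest case (it assumes the digest buffer was allocated with rte_malloc(); error handling is simplified):

    #include <rte_crypto.h>
    #include <rte_malloc.h>
    #include <rte_memory.h>

    /* Sketch: the device needs the digest buffer's IOVA, the CPU its VA. */
    static int
    set_auth_digest(struct rte_crypto_op *op, uint8_t *digest)
    {
        struct rte_crypto_sym_op *sym = op->sym;

        sym->auth.digest.data = digest;
        sym->auth.digest.phys_addr = rte_malloc_virt2iova(digest);
        return (sym->auth.digest.phys_addr == RTE_BAD_IOVA) ? -1 : 0;
    }
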
/f-stack/dpdk/drivers/net/octeontx/base/
  octeontx_pkovf.c
    507: pko_vf_ctl.fc_iomem.iova = rte_malloc_virt2iova((void *)  in octeontx_pko_init_fc()
/f-stack/dpdk/drivers/net/bnxt/
  bnxt_hwrm.c
    401: rte_cpu_to_le_64(rte_malloc_virt2iova(vlan_table));  in bnxt_hwrm_cfa_l2_set_rx_mask()
    442: rte_cpu_to_le_64(rte_malloc_virt2iova(vlan_table));  in bnxt_hwrm_cfa_vlan_antispoof_cfg()
    1152: rte_malloc_virt2iova(bp->hwrm_cmd_resp_addr);  in bnxt_hwrm_ver_get()
    1188: rte_malloc_virt2iova(bp->hwrm_short_cmd_req_addr);  in bnxt_hwrm_ver_get()
    2661: rte_malloc_virt2iova(bp->hwrm_cmd_resp_addr);  in bnxt_alloc_hwrm_resources()
    3888: rte_cpu_to_le_64(rte_malloc_virt2iova(bp->pf->vf_req_buf));  in bnxt_hwrm_func_buf_rgtr()
    4321: dma_handle = rte_malloc_virt2iova(buf);  in bnxt_get_nvram_directory()
    4355: dma_handle = rte_malloc_virt2iova(buf);  in bnxt_hwrm_get_nvram_item()
    4408: dma_handle = rte_malloc_virt2iova(buf);  in bnxt_hwrm_flash_nvram()
    4471: req.vnic_id_tbl_addr = rte_cpu_to_le_64(rte_malloc_virt2iova(vnic_ids));  in bnxt_hwrm_func_vf_vnic_query()
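
A detail worth noting in several bnxt hits: HWRM request structures are little-endian, so the IOVA is wrapped in rte_cpu_to_le_64() before being written into the message, a no-op on x86 but a byte swap on big-endian hosts. A sketch of the shape (struct hw_req and its dma_addr field are illustrative, not the driver's generated types):

    #include <stdint.h>
    #include <rte_byteorder.h>
    #include <rte_malloc.h>

    /* Illustrative request layout; real HWRM structs are generated headers. */
    struct hw_req {
        uint64_t dma_addr; /* device parses this as little-endian */
    };

    static void
    fill_dma_addr(struct hw_req *req, const void *buf /* rte_malloc()'d */)
    {
        req->dma_addr = rte_cpu_to_le_64(rte_malloc_virt2iova(buf));
    }
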
/f-stack/dpdk/drivers/crypto/virtio/
  virtio_cryptodev.c
    145: phys_addr_started = rte_malloc_virt2iova(virt_addr_started);  in virtio_crypto_send_command()
    981: malloc_phys_addr = rte_malloc_virt2iova(malloc_virt_addr);  in virtio_crypto_sym_clear_session()
/f-stack/dpdk/drivers/baseband/fpga_5gnr_fec/
  rte_fpga_5gnr_fec.c
    259: fpga_dev->sw_rings_phys = rte_malloc_virt2iova(fpga_dev->sw_rings);  in fpga_setup_queues()
    274: phys_addr = rte_malloc_virt2iova(fpga_dev->flush_queue_status);  in fpga_setup_queues()
    466: rte_malloc_virt2iova(q->ring_head_addr);  in fpga_queue_setup()
/f-stack/dpdk/drivers/baseband/acc100/
  rte_acc100_pmd.c
    69: rte_iova_t unaligned_phy_mem = rte_malloc_virt2iova(unaligned_virt_mem);  in calc_mem_alignment_offset()
    321: d->sw_rings_iova = rte_malloc_virt2iova(d->sw_rings_base) +  in alloc_2x64mb_sw_rings_mem()
    362: sw_rings_base_iova = rte_malloc_virt2iova(sw_rings_base);  in alloc_sw_rings_min_mem()
    582: info_ring_iova = rte_malloc_virt2iova(d->info_ring);  in allocate_info_ring()
    674: d->tail_ptr_iova = rte_malloc_virt2iova(d->tail_ptrs);  in acc100_setup_queues()
    887: q->lb_in_addr_iova = rte_malloc_virt2iova(q->lb_in);  in acc100_queue_setup()
    897: q->lb_out_addr_iova = rte_malloc_virt2iova(q->lb_out);  in acc100_queue_setup()
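
The first acc100 hit (calc_mem_alignment_offset) shows a different use: deciding how far into an over-sized allocation to start so that the IOVA itself, not just the virtual address, satisfies the hardware's alignment requirement. A sketch of the idea (the driver's actual computation differs in detail):

    #include <stddef.h>
    #include <rte_common.h>
    #include <rte_malloc.h>

    /* Sketch: bytes to skip so the IOVA of (virt + offset) is aligned. */
    static size_t
    iova_align_offset(const void *unaligned_virt, uint64_t align)
    {
        rte_iova_t iova = rte_malloc_virt2iova(unaligned_virt);

        return (size_t)(RTE_ALIGN_CEIL(iova, align) - iova);
    }
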
/f-stack/dpdk/drivers/mempool/octeontx/
  octeontx_fpavf.c
    247: phys_addr = rte_malloc_virt2iova(memptr);  in octeontx_fpapf_pool_setup()
/f-stack/dpdk/drivers/baseband/fpga_lte_fec/
  fpga_lte_fec.c
    551: fpga_dev->sw_rings_phys = rte_malloc_virt2iova(fpga_dev->sw_rings);  in fpga_setup_queues()
    566: phys_addr = rte_malloc_virt2iova(fpga_dev->flush_queue_status);  in fpga_setup_queues()
    738: rte_malloc_virt2iova(q->ring_head_addr);  in fpga_queue_setup()
/f-stack/dpdk/drivers/net/octeontx2/
  otx2_ethdev_sec.c
    296: md_iova = rte_malloc_virt2iova(md);  in hmac_init()
/f-stack/dpdk/drivers/net/netvsc/
  hn_rndis.c
    262: addr = rte_malloc_virt2iova(req);  in hn_nvs_send_rndis_ctrl()
/f-stack/dpdk/doc/guides/rel_notes/
  release_17_11.rst
    380: * ``rte_malloc_virt2phy`` is aliased with ``rte_malloc_virt2iova``.