| /dpdk/drivers/net/qede/base/ |
| H A D | bcm_osal.c |
    128  dma_addr_t *phys, size_t size)  in osal_dma_alloc_coherent() argument
    138  *phys = 0;  in osal_dma_alloc_coherent()
    154  *phys = 0;  in osal_dma_alloc_coherent()
    157  *phys = mz->iova;  in osal_dma_alloc_coherent()
    167  dma_addr_t *phys, size_t size, int align)  in osal_dma_alloc_coherent_aligned() argument
    177  *phys = 0;  in osal_dma_alloc_coherent_aligned()
    193  *phys = 0;  in osal_dma_alloc_coherent_aligned()
    196  *phys = mz->iova;  in osal_dma_alloc_coherent_aligned()
    205  void osal_dma_free_mem(struct ecore_dev *p_dev, dma_addr_t phys)  in osal_dma_free_mem() argument
    210  if (phys == ecore_mz_mapping[j]->iova) {  in osal_dma_free_mem()
|
| H A D | bcm_osal.h |
    112  void osal_dma_free_mem(struct ecore_dev *edev, dma_addr_t phys);
    114  #define OSAL_DMA_ALLOC_COHERENT(dev, phys, size) \  argument
    115  osal_dma_alloc_coherent(dev, phys, size)
    117  #define OSAL_DMA_ALLOC_COHERENT_ALIGNED(dev, phys, size, align) \  argument
    118  osal_dma_alloc_coherent_aligned(dev, phys, size, align)
    120  #define OSAL_DMA_FREE_COHERENT(dev, virt, phys, size) \  argument
    121  osal_dma_free_mem(dev, phys)
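The qede OSAL fragments above back OSAL_DMA_ALLOC_COHERENT with an rte_memzone and return the zone's IOVA through *phys; the free path (bcm_osal.c:210) later finds the zone again by comparing IOVAs. A minimal sketch of that allocation pattern, assuming only the standard rte_memzone API; the function name and error convention here are illustrative, not the driver's exact code:

#include <rte_memzone.h>

/* Hedged sketch: reserve an IOVA-contiguous memzone and report both the
 * CPU virtual address (return value) and the DMA/IOVA address (*phys). */
static void *
example_dma_alloc_coherent(const char *name, size_t size, rte_iova_t *phys)
{
	const struct rte_memzone *mz;

	mz = rte_memzone_reserve_aligned(name, size, SOCKET_ID_ANY,
					 RTE_MEMZONE_IOVA_CONTIG,
					 RTE_CACHE_LINE_SIZE);
	if (mz == NULL) {
		*phys = 0;		/* same failure convention as above */
		return NULL;
	}

	*phys = mz->iova;		/* bus address handed to the device */
	return mz->addr;		/* CPU mapping used by the driver */
}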
|
| H A D | ecore_hw.c |
    749  dma_addr_t phys = p_hwfn->dmae_info.intermediate_buffer_phys_addr;  in ecore_dmae_execute_sub_operation() local
    761  cmd->src_addr_hi = OSAL_CPU_TO_LE32(DMA_HI(phys));  in ecore_dmae_execute_sub_operation()
    762  cmd->src_addr_lo = OSAL_CPU_TO_LE32(DMA_LO(phys));  in ecore_dmae_execute_sub_operation()
    779  cmd->dst_addr_hi = OSAL_CPU_TO_LE32(DMA_HI(phys));  in ecore_dmae_execute_sub_operation()
    780  cmd->dst_addr_lo = OSAL_CPU_TO_LE32(DMA_LO(phys));  in ecore_dmae_execute_sub_operation()
    834  dma_addr_t phys = p_hwfn->dmae_info.completion_word_phys_addr;  in ecore_dmae_execute_command() local
    876  cmd->comp_addr_lo = OSAL_CPU_TO_LE32(DMA_LO(phys));  in ecore_dmae_execute_command()
    877  cmd->comp_addr_hi = OSAL_CPU_TO_LE32(DMA_HI(phys));  in ecore_dmae_execute_command()
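The DMAE command in these fragments takes the 64-bit DMA address as two little-endian 32-bit halves (DMA_HI/DMA_LO wrapped in OSAL_CPU_TO_LE32). A hedged, generic equivalent of that split using the EAL byte-order helpers instead of the driver's OSAL macros; the plain uint32_t fields stand in for the real descriptor layout:

#include <stdint.h>
#include <rte_byteorder.h>

/* Split a 64-bit DMA address into hi/lo little-endian command fields. */
static inline void
example_set_dma_addr(uint32_t *hi, uint32_t *lo, uint64_t phys)
{
	*hi = rte_cpu_to_le_32((uint32_t)(phys >> 32));
	*lo = rte_cpu_to_le_32((uint32_t)(phys & 0xffffffffu));
}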
|
| H A D | ecore_vf.c |
    207  p_iov->bulletin.phys,  in _ecore_vf_pf_release()
    332  req->bulletin_addr = p_iov->bulletin.phys;  in ecore_vf_pf_acquire()
    609  phys,  in ecore_vf_hw_prepare()
    618  p_iov->bulletin.p_virt, (unsigned long)p_iov->bulletin.phys,  in ecore_vf_hw_prepare()
    667  p_iov->bulletin.phys,  in ecore_vf_hw_prepare()
|
| H A D | ecore_vfpf_if.h |
    663  dma_addr_t phys;  member
|
| H A D | ecore_sriov.c |
    409  return ecore_dmae_host2host(p_hwfn, p_ptt, p_vf->bulletin.phys,  in ecore_iov_post_vf_bulletin()
    523  vf->bulletin.phys = idx *  in ecore_iov_setup_vfdb()
|
| /dpdk/drivers/net/ena/base/ |
| H A D | ena_plat_dpdk.h |
    212  dmadev, size, virt, phys, mem_handle, alignment) \  argument
    219  (phys) = phys_addr; \
    221  #define ENA_MEM_ALLOC_COHERENT(dmadev, size, virt, phys, mem_handle) \  argument
    222  ENA_MEM_ALLOC_COHERENT_ALIGNED(dmadev, size, virt, phys, \
    224  #define ENA_MEM_FREE_COHERENT(dmadev, size, virt, phys, mem_handle) \  argument
    225  ({ ENA_TOUCH(size); ENA_TOUCH(phys); ENA_TOUCH(dmadev); \
    229  dmadev, size, virt, phys, mem_handle, node, dev_node, alignment) \  argument
    237  (phys) = phys_addr; \
    240  dmadev, size, virt, phys, mem_handle, node, dev_node) \  argument
    241  ENA_MEM_ALLOC_COHERENT_NODE_ALIGNED(dmadev, size, virt, phys, \
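The ENA base code reaches the same memzone-backed allocation through macros that fill in virt, phys, and mem_handle for the caller. A hedged sketch of a wrapper pair in that spirit; the macro names, the caller-supplied zone name, and the assumption that the handle is the memzone pointer are illustrative, not the driver's actual definitions:

#include <stddef.h>
#include <rte_memzone.h>

/* Illustrative allocate/free pair in the style of the ENA macros above;
 * 'handle' is assumed to be a 'const struct rte_memzone *' lvalue. */
#define EXAMPLE_MEM_ALLOC_COHERENT(name, size, virt, phys, handle)	\
	do {								\
		(handle) = rte_memzone_reserve((name), (size),		\
					       SOCKET_ID_ANY,		\
					       RTE_MEMZONE_IOVA_CONTIG);\
		if ((handle) != NULL) {					\
			(virt) = (handle)->addr;			\
			(phys) = (handle)->iova;			\
		} else {						\
			(virt) = NULL;					\
			(phys) = 0;					\
		}							\
	} while (0)

#define EXAMPLE_MEM_FREE_COHERENT(handle)				\
	do {								\
		if ((handle) != NULL)					\
			rte_memzone_free(handle);			\
		(handle) = NULL;					\
	} while (0)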
|
| /dpdk/lib/eal/windows/ |
| H A D | eal_memory.c |
    210  LARGE_INTEGER phys;  in rte_mem_virt2phy() local
    218  &virt, sizeof(virt), &phys, sizeof(phys),  in rte_mem_virt2phy()
    224  return phys.QuadPart;  in rte_mem_virt2phy()
    230  phys_addr_t phys;  in rte_mem_virt2iova() local
    235  phys = rte_mem_virt2phy(virt);  in rte_mem_virt2iova()
    236  if (phys == RTE_BAD_PHYS_ADDR)  in rte_mem_virt2iova()
    238  return (rte_iova_t)phys;  in rte_mem_virt2iova()
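These fragments are the Windows EAL internals of rte_mem_virt2phy() and rte_mem_virt2iova(). From driver or application code the same translation is reached through the public API; a hedged usage sketch (the wrapper name is illustrative):

#include <rte_memory.h>

/* Look up the physical/IOVA address behind an EAL-managed buffer before
 * handing it to a device; RTE_BAD_PHYS_ADDR means no translation exists. */
static rte_iova_t
example_buf_iova(const void *buf)
{
	phys_addr_t phys = rte_mem_virt2phy(buf);

	if (phys == RTE_BAD_PHYS_ADDR)
		return RTE_BAD_IOVA;

	return (rte_iova_t)phys;
}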
|
| /dpdk/drivers/net/thunderx/ |
| H A D | nicvf_struct.h |
    21  nicvf_iova_addr_t phys;  member
    31  nicvf_iova_addr_t phys;  member
    78  nicvf_iova_addr_t phys;  member
|
| H A D | nicvf_ethdev.c |
    574  rxq->phys = rz->iova;  in nicvf_qset_cq_alloc()
    598  sq->phys = rz->iova;  in nicvf_qset_sq_alloc()
    633  rbdr->phys = rz->iova;  in nicvf_qset_rbdr_alloc()
    1031  txq->phys, txq->offloads);  in nicvf_dev_tx_queue_setup()
    1369  rte_mempool_avail_count(mp), rxq->phys, offloads);  in nicvf_dev_rx_queue_setup()
|
| /dpdk/drivers/net/hinic/base/ |
| H A D | hinic_pmd_hwdev.c |
    177  void *virt, dma_addr_t phys)  in hinic_dma_mem_free() argument
    184  if (virt == NULL || phys == 0)  in hinic_dma_mem_free()
    188  sig = HINIC_HASH_FUNC(&phys, HINIC_HASH_KEY_LEN,  in hinic_dma_mem_free()
    193  (void *)phys, rc);  in hinic_dma_mem_free()
    202  (void *)phys, virt, size);  in hinic_dma_mem_free()
    236  hinic_dma_mem_free(hwdev, size, virt, phys);  in dma_free_coherent()
    240  volatile void *virt, dma_addr_t phys)  in dma_free_coherent_volatile() argument
    248  if (virt == NULL || phys == 0)  in dma_free_coherent_volatile()
    252  sig = HINIC_HASH_FUNC(&phys, HINIC_HASH_KEY_LEN,  in dma_free_coherent_volatile()
    257  (void *)phys, rc);  in dma_free_coherent_volatile()
    [all …]
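The hinic free path hashes the DMA address (HINIC_HASH_FUNC(&phys, ...)) so that, given only the (virt, phys) pair passed to dma_free_coherent(), it can find and release the backing memzone. A hedged sketch of that lookup-by-address idea using the generic rte_hash API; the hash creation and key width are assumed to exist elsewhere, and the names are illustrative rather than the driver's:

#include <rte_hash.h>
#include <rte_memzone.h>

/* Free a coherent buffer when only its DMA address is known: the memzone
 * pointer was stored in a hash keyed by the IOVA at allocation time. */
static void
example_dma_free_coherent(struct rte_hash *mz_by_iova, void *virt,
			  rte_iova_t phys)
{
	const struct rte_memzone *mz = NULL;

	if (virt == NULL || phys == 0)
		return;

	if (rte_hash_lookup_data(mz_by_iova, &phys, (void **)&mz) < 0 ||
	    mz == NULL)
		return;		/* unknown address, nothing to free */

	rte_hash_del_key(mz_by_iova, &phys);
	rte_memzone_free(mz);
}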
|
| H A D | hinic_compat.h |
    129  void dma_free_coherent(void *dev, size_t size, void *virt, dma_addr_t phys);
|
| H A D | hinic_pmd_hwdev.h |
    452  volatile void *virt, dma_addr_t phys);
|
| /dpdk/drivers/event/dlb2/pf/ |
| H A D | dlb2_pf.c |
    233  dlb2_alloc_coherent_aligned(const struct rte_memzone **mz, uintptr_t *phys,  in dlb2_alloc_coherent_aligned() argument
    250  *phys = 0;  in dlb2_alloc_coherent_aligned()
    253  *phys = (*mz)->iova;  in dlb2_alloc_coherent_aligned()
|
| /dpdk/lib/kni/ |
| H A D | rte_kni.c |
    661  void *phys[MAX_MBUF_BURST_NUM];  in kni_allocate_mbufs() local
    695  phys[i] = va2pa(pkts[i]);  in kni_allocate_mbufs()
    702  ret = kni_fifo_put(kni->alloc_q, phys, i);  in kni_allocate_mbufs()
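Before mbuf pointers are pushed to the kernel through the alloc_q FIFO, KNI converts each mbuf's virtual address to a physical one (the va2pa() call above). A hedged sketch of that conversion, deriving the mbuf header's PA from the buffer's recorded buf_addr/buf_iova pair; treat it as an illustration rather than the library's exact helper:

#include <stdint.h>
#include <rte_mbuf.h>

/* Translate an mbuf header pointer from VA to PA using the VA/IOVA pair
 * recorded for its data buffer; only valid for mbuf memory. */
static inline void *
example_mbuf_va2pa(struct rte_mbuf *m)
{
	uintptr_t va_to_pa_off =
		(uintptr_t)m->buf_addr - (uintptr_t)m->buf_iova;

	return (void *)((uintptr_t)m - va_to_pa_off);
}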
|
| /dpdk/drivers/net/thunderx/base/ |
| H A D | nicvf_hw.c |
    443  nicvf_queue_reg_write(nic, NIC_QSET_RBDR_0_1_BASE, qidx, rbdr->phys);  in nicvf_qset_rbdr_config()
    558  nicvf_queue_reg_write(nic, NIC_QSET_SQ_0_7_BASE, qidx, txq->phys);  in nicvf_qset_sq_config()
    664  nicvf_queue_reg_write(nic, NIC_QSET_CQ_0_7_BASE, qidx, rxq->phys);  in nicvf_qset_cq_config()
|
| /dpdk/drivers/net/cxgbe/ |
| H A D | sge.c |
    1388  dma_addr_t *phys, void *metadata)  in alloc_ring() argument
    1427  *phys = (uint64_t)tz->iova;  in alloc_ring()
|