/f-stack/dpdk/drivers/net/thunderx/
    nicvf_rxtx.h
        35   sqe.gather.addr = rte_mbuf_data_iova(pkt);   in fill_sq_desc_gather()
        55   entry->buff[1] = rte_mbuf_data_iova(pkt);   in fill_sq_desc_gather()
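
These driver call sites all repeat the same transmit-side pattern: walk the mbuf segment chain and program each segment's bus address and length into a hardware gather/descriptor entry. A minimal sketch of that pattern follows; struct hw_sg_entry and fill_gather() are hypothetical names used only for illustration, while the rte_* calls are the DPDK mbuf API shown in the listing.

    #include <rte_mbuf.h>

    struct hw_sg_entry {
            uint64_t addr;  /* IO (bus) address the NIC will DMA from */
            uint32_t len;   /* number of payload bytes in this segment */
    };

    /* Fill up to 'max' gather entries from a (possibly multi-segment) packet. */
    static inline unsigned int
    fill_gather(struct hw_sg_entry *sg, struct rte_mbuf *pkt, unsigned int max)
    {
            unsigned int n = 0;
            struct rte_mbuf *seg;

            for (seg = pkt; seg != NULL && n < max; seg = seg->next) {
                    sg[n].addr = rte_mbuf_data_iova(seg);    /* start of this segment's data */
                    sg[n].len = rte_pktmbuf_data_len(seg);   /* bytes in this segment */
                    n++;
            }
            return n;
    }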
|
/f-stack/dpdk/drivers/net/sfc/
    sfc_ef100_tx.c
        374  ESF_GZ_TX_SEND_ADDR, rte_mbuf_data_iova(m),   in sfc_ef100_tx_qdesc_send_create()
        579  rte_mbuf_data_iova(m_seg),   in sfc_ef100_xmit_tso_pkt()
        589  rte_mbuf_data_iova(m_seg),   in sfc_ef100_xmit_tso_pkt()
        596  rte_mbuf_data_iova(m_seg) + remaining_hdr_len,   in sfc_ef100_xmit_tso_pkt()
        704  sfc_ef100_tx_qdesc_seg_create(rte_mbuf_data_iova(m_seg),   in sfc_ef100_xmit_pkts()
|
    sfc_ef10_tx.c
        426  hdr_iova = rte_mbuf_data_iova(m_seg);   in sfc_ef10_xmit_tso_pkt()
        509  rte_iova_t next_frag = rte_mbuf_data_iova(m_seg);   in sfc_ef10_xmit_tso_pkt()
        675  rte_iova_t seg_addr = rte_mbuf_data_iova(m_seg);   in sfc_ef10_xmit_pkts()
        853  sfc_ef10_tx_qdesc_dma_create(rte_mbuf_data_iova(pkt),   in sfc_ef10_simple_xmit_pkts()
|
    sfc_tx.c
        841  next_frag = rte_mbuf_data_iova(m_seg);   in sfc_efx_xmit_pkts()
|
/f-stack/dpdk/drivers/net/ionic/
    ionic_rxtx.c
        394  data_iova = rte_mbuf_data_iova(txm_seg);   in ionic_tx_tso()
        405  data_iova = rte_mbuf_data_iova(txm_seg);   in ionic_tx_tso()
        480  elem->addr = rte_cpu_to_le_64(rte_mbuf_data_iova(txm_seg));   in ionic_tx()
        926  data_iova = rte_mbuf_data_iova(rxm_seg);   in ionic_rx_fill()
|
/f-stack/dpdk/drivers/net/octeontx2/
    otx2_ethdev_sec_tx.h
        146  sd->nix_iova.addr = rte_mbuf_data_iova(m);   in otx2_sec_event_tx()
|
    otx2_tx.h
        358  *(rte_iova_t *)(++sg) = rte_mbuf_data_iova(m);   in otx2_nix_xmit_prepare()
        441  *slist = rte_mbuf_data_iova(m);   in otx2_nix_prepare_mseg()
|
/f-stack/dpdk/drivers/net/octeontx/
    octeontx_rxtx.h
        347  cmd_buf[nb_desc++] = rte_mbuf_data_iova(tx_pkt);   in __octeontx_xmit_prepare()
        402  cmd_buf[nb_desc++] = rte_mbuf_data_iova(tx_pkt);   in __octeontx_xmit_mseg_prepare()
|
/f-stack/dpdk/drivers/net/bnxt/
    bnxt_txr.c
        185  txbd->address = rte_cpu_to_le_64(rte_mbuf_data_iova(tx_buf->mbuf));   in bnxt_start_xmit()
        326  txbd->address = rte_cpu_to_le_64(rte_mbuf_data_iova(m_seg));   in bnxt_start_xmit()
|
/f-stack/dpdk/drivers/net/hinic/
    hinic_pmd_tx.c
        340  dma_addr = rte_mbuf_data_iova(mbuf);   in hinic_mbuf_dma_map_sge()
        360  dma_addr = rte_mbuf_data_iova(mbuf);   in hinic_mbuf_dma_map_sge()
        381  dma_addr = rte_mbuf_data_iova(mbuf);   in hinic_mbuf_dma_map_sge()
        405  dma_addr = rte_mbuf_data_iova(mbuf);   in hinic_mbuf_dma_map_sge()
|
/f-stack/dpdk/drivers/net/qede/
    qede_rxtx.c
        2076  mapping = rte_mbuf_data_iova(m_seg);   in qede_encode_sg_bd()
        2086  mapping = rte_mbuf_data_iova(m_seg);   in qede_encode_sg_bd()
        2094  mapping = rte_mbuf_data_iova(m_seg);   in qede_encode_sg_bd()
        2317  QEDE_BD_SET_ADDR_LEN(bd1, rte_mbuf_data_iova(mbuf),
        2623  QEDE_BD_SET_ADDR_LEN(bd1, rte_mbuf_data_iova(mbuf),
        2636  QEDE_BD_SET_ADDR_LEN(bd1, rte_mbuf_data_iova(mbuf),
        2640  rte_mbuf_data_iova(mbuf)),
|
/f-stack/dpdk/drivers/net/liquidio/
    lio_rxtx.c
        1251  sc->dma_addr = rte_mbuf_data_iova(m);   in lio_alloc_soft_command()
        1692  ndata.cmd.cmd3.dptr = rte_mbuf_data_iova(m);   in lio_dev_xmit_pkts()
        1724  g->sg[0].ptr[0] = rte_mbuf_data_iova(m);   in lio_dev_xmit_pkts()
        1735  rte_mbuf_data_iova(m);   in lio_dev_xmit_pkts()
|
/f-stack/dpdk/drivers/net/ark/
    ark_ethdev_tx.c
        73   meta->physaddr = rte_mbuf_data_iova(mbuf);   in eth_ark_tx_meta_from_mbuf()
|
/f-stack/dpdk/drivers/net/netvsc/
    hn_rxtx.c
        1388  unsigned int offs = rte_mbuf_data_iova(m) & PAGE_MASK;   in hn_get_slots()
        1404  rte_iova_t addr = rte_mbuf_data_iova(m);   in hn_fill_sg()
|
/f-stack/dpdk/lib/librte_mbuf/
    rte_mbuf.h
        149  rte_mbuf_data_iova(const struct rte_mbuf *mb)   definition of rte_mbuf_data_iova()
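
For reference, the helper indexed above returns the bus address of the start of the mbuf's data area. The body below is a sketch of the upstream definition (buffer IOVA plus data offset), written from memory of this DPDK snapshot; consult rte_mbuf.h:149 itself for the authoritative version.

    static inline rte_iova_t
    rte_mbuf_data_iova(const struct rte_mbuf *mb)
    {
            return mb->buf_iova + mb->data_off;
    }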
|
/f-stack/dpdk/drivers/net/axgbe/
    axgbe_rxtx.c
        725  desc->baddr = rte_mbuf_data_iova(mbuf);   in axgbe_xmit_hw()
|
/f-stack/dpdk/drivers/net/i40e/
    i40e_rxtx.c
        1187  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in i40e_xmit_pkts()
        1303  dma_addr = rte_mbuf_data_iova(*pkts);   in tx4()
        1317  dma_addr = rte_mbuf_data_iova(*pkts);   in tx1()
|
/f-stack/dpdk/drivers/net/ice/
    ice_rxtx.c
        2656  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in ice_xmit_pkts()
        2876  dma_addr = rte_mbuf_data_iova(*pkts);   in tx4()
        2890  dma_addr = rte_mbuf_data_iova(*pkts);   in tx1()
|
/f-stack/dpdk/drivers/net/hns3/
    hns3_rxtx.c
        2733  desc->addr = rte_mbuf_data_iova(rxm);   in hns3_fill_per_desc()
        3466  dma_addr = rte_mbuf_data_iova(*pkts);   in hns3_tx_setup_4bd()
        3482  dma_addr = rte_mbuf_data_iova(*pkts);   in hns3_tx_setup_1bd()
|
/f-stack/dpdk/drivers/net/atlantic/
    atl_rxtx.c
        1277  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in atl_xmit_pkt()
|
/f-stack/dpdk/drivers/net/txgbe/
    txgbe_rxtx.c
        154  buf_dma_addr = rte_mbuf_data_iova(*pkts);   in tx4()
        174  buf_dma_addr = rte_mbuf_data_iova(*pkts);   in tx1()
        932  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in txgbe_xmit_pkts()
|
/f-stack/dpdk/drivers/net/vmxnet3/
    vmxnet3_rxtx.c
        503  gdesc->txd.addr = rte_mbuf_data_iova(m_seg);   in vmxnet3_xmit_pkts()
|
/f-stack/dpdk/drivers/net/ixgbe/
    ixgbe_rxtx.c
        160  buf_dma_addr = rte_mbuf_data_iova(*pkts);   in tx4()
        183  buf_dma_addr = rte_mbuf_data_iova(*pkts);   in tx1()
        901  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in ixgbe_xmit_pkts()
|
/f-stack/dpdk/drivers/net/e1000/
    em_rxtx.c
        561  buf_dma_addr = rte_mbuf_data_iova(m_seg);   in eth_em_xmit_pkts()
|
/f-stack/dpdk/doc/guides/rel_notes/
    release_17_11.rst
        390  * ``rte_mbuf_data_dma_addr*()`` are aliased with ``rte_mbuf_data_iova*()``.
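
As a usage note on the rename recorded above: since DPDK 17.11 the *_dma_addr helpers are aliases of the *_iova helpers, so new code should use the iova spelling. The wrapper below only illustrates that choice; tx_buf_addr() is a hypothetical name.

    #include <rte_mbuf.h>

    /* Return the bus address a TX descriptor should carry for this mbuf. */
    static inline rte_iova_t
    tx_buf_addr(const struct rte_mbuf *m)
    {
            return rte_mbuf_data_iova(m);  /* preferred over the older rte_mbuf_data_dma_addr(m) */
    }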
|