/dpdk/drivers/dma/skeleton/
  skeleton_dmadev.c
      83  (void)rte_ring_enqueue(hw->desc_completed, (void *)desc);     in cpucopy_thread()
      95  (void)rte_ring_enqueue(hw->desc_empty, (void *)desc);         in fflush_ring()
     204  (void)rte_ring_enqueue(empty, (void *)(desc + i));            in vchan_setup()
     340  (void)rte_ring_enqueue(hw->desc_running, (void *)pend_desc);  in submit()
     345  (void)rte_ring_enqueue(hw->desc_running, (void *)desc);       in submit()
     370  (void)rte_ring_enqueue(hw->desc_pending, (void *)desc);       in skeldma_copy()
     404  (void)rte_ring_enqueue(hw->desc_empty, (void *)desc);         in skeldma_completed()
     428  (void)rte_ring_enqueue(hw->desc_empty, (void *)desc);         in skeldma_completed_status()
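In the skeleton DMA driver the call sites above move descriptors between per-state rings (empty, pending, running, completed); the return value is cast to void because each ring is sized to hold every descriptor, so the enqueue cannot overflow. A minimal sketch of that pattern, with illustrative helper and variable names that are not the driver's own:

    #include <rte_ring.h>

    /* Illustrative descriptor-lifecycle move: both rings are assumed to be
     * created large enough to hold every descriptor of the channel, so
     * moving a descriptor from one ring to the next can never overflow and
     * the enqueue return value can safely be ignored. */
    static inline void
    move_one_desc(struct rte_ring *from, struct rte_ring *to)
    {
        void *desc;

        if (rte_ring_dequeue(from, &desc) == 0)
            (void)rte_ring_enqueue(to, desc); /* cannot fail by construction */
    }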
|
/dpdk/examples/multi_process/simple_mp/
  mp_commands.c
      51  if (rte_ring_enqueue(send_ring, msg) < 0) {  in cmd_send_parsed()
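The simple_mp example hands a message buffer to the peer process over a shared ring and returns the buffer to its mempool when the enqueue fails. A condensed sketch of that flow; the ring name, pool name, helper name and 64-byte element size are assumptions for illustration, not taken verbatim from the example:

    #include <stdio.h>
    #include <rte_ring.h>
    #include <rte_mempool.h>

    /* Send a short text message to the peer process. The ring and the
     * message mempool are assumed to have been created by the primary
     * process under the names used below. */
    static int
    send_message(const char *text)
    {
        struct rte_ring *send_ring = rte_ring_lookup("PRI_2_SEC");
        struct rte_mempool *pool = rte_mempool_lookup("MSG_POOL");
        void *msg;

        if (send_ring == NULL || pool == NULL)
            return -1;
        if (rte_mempool_get(pool, &msg) < 0)
            return -1;

        snprintf(msg, 64, "%s", text);
        if (rte_ring_enqueue(send_ring, msg) < 0) {
            /* Peer's ring is full: give the buffer back instead of leaking it. */
            rte_mempool_put(pool, msg);
            return -1;
        }
        return 0;
    }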
|
/dpdk/drivers/mempool/bucket/
  rte_mempool_bucket.c
     124  rc = rte_ring_enqueue(adopt_ring, obj);              in bucket_enqueue_single()
     131  rc = rte_ring_enqueue(bd->shared_bucket_ring, hdr);  in bucket_enqueue_single()
     209  rc = rte_ring_enqueue(bd->shared_orphan_ring,        in bucket_dequeue_orphans()
|
/dpdk/app/test/
  test_table.h
      70  rte_ring_enqueue((ring), m); \

  test_ring.h
     108  return rte_ring_enqueue(r, *obj);  in test_ring_enqueue()

  test_table_pipeline.c
     450  rte_ring_enqueue(rings_rx[i], m);  in test_pipeline_single_filter()

  test_table_acl.c
     672  rte_ring_enqueue(rings_rx[i], mbuf);  in test_pipeline_single_filter()

  test_mbuf.c
    1037  rte_ring_enqueue(refcnt_mbuf_ring, m);  in test_refcnt_iter()
    1041  rte_ring_enqueue(refcnt_mbuf_ring, m);  in test_refcnt_iter()

  test_event_timer_adapter.c
     801  while (rte_ring_enqueue(timer_producer_ring, ev_tim) != 0)  in _cancel_producer()
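Several of the test call sites above simply retry until the enqueue succeeds, which is acceptable in test code where a consumer is known to be draining the ring. A hedged sketch of that busy-wait pattern, with an added rte_pause() to be gentler on the core (the helper name is illustrative):

    #include <rte_ring.h>
    #include <rte_pause.h>

    /* Block until 'obj' has been placed on the ring. Only sensible when a
     * consumer is guaranteed to keep draining the ring, as in the tests above. */
    static inline void
    ring_enqueue_blocking(struct rte_ring *r, void *obj)
    {
        while (rte_ring_enqueue(r, obj) != 0)
            rte_pause();
    }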
|
/dpdk/lib/ring/
  rte_ring.h
     340  rte_ring_enqueue(struct rte_ring *r, void *obj)  function definition, rte_ring_enqueue()
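rte_ring.h is where the single-object wrapper itself is defined: it returns 0 on success and -ENOBUFS when the ring has no free slot, while rte_ring_dequeue() returns -ENOENT on an empty ring. A minimal, self-contained sketch of the create/enqueue/dequeue round trip (ring name and size chosen arbitrarily for illustration):

    #include <stdio.h>
    #include <rte_eal.h>
    #include <rte_lcore.h>
    #include <rte_ring.h>

    int
    main(int argc, char **argv)
    {
        struct rte_ring *r;
        int value = 42;
        void *obj = NULL;

        if (rte_eal_init(argc, argv) < 0)
            return -1;

        /* Single-producer/single-consumer ring; with the default flags a
         * ring created with size 1024 holds at most 1023 objects. */
        r = rte_ring_create("demo_ring", 1024, rte_socket_id(),
                            RING_F_SP_ENQ | RING_F_SC_DEQ);
        if (r == NULL)
            return -1;

        if (rte_ring_enqueue(r, &value) != 0)   /* -ENOBUFS when full */
            printf("ring full\n");
        if (rte_ring_dequeue(r, &obj) == 0)     /* -ENOENT when empty */
            printf("dequeued %d\n", *(int *)obj);

        rte_ring_free(r);
        rte_eal_cleanup();
        return 0;
    }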
|
/dpdk/drivers/crypto/null/
  null_crypto_pmd.c
      69  return rte_ring_enqueue(qp->processed_pkts, (void *)op);  in process_op()
|
/dpdk/drivers/net/bonding/
  rte_eth_bond_8023ad.c
     630  int retval = rte_ring_enqueue(port->tx_ring, lacp_pkt);  in tx_machine()
    1366  if (rte_ring_enqueue(port->tx_ring, pkt) != 0) {         in bond_mode_8023ad_handle_slow_pkt()
    1386  if (rte_ring_enqueue(port->rx_ring, pkt) != 0) {         in bond_mode_8023ad_handle_slow_pkt()
    1670  return rte_ring_enqueue(port->tx_ring, lacp_pkt);        in rte_eth_bond_8023ad_ext_slowtx()

  rte_eth_bond_pmd.c
    1253  rte_ring_enqueue(port->tx_ring, ctrl_pkt);  in tx_burst_8023ad()
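In the bonding driver the slow-path (LACP) packets above are handed between contexts through per-port rings, and a failed enqueue means the mbuf must be dropped and freed rather than leaked. A hedged sketch of that hand-off, with an illustrative helper name:

    #include <rte_ring.h>
    #include <rte_mbuf.h>

    /* Hand a control packet to the TX machine through its ring; on a full
     * ring the mbuf is freed so it is not leaked and the caller can treat
     * the packet as dropped. */
    static int
    queue_ctrl_pkt(struct rte_ring *tx_ring, struct rte_mbuf *pkt)
    {
        if (rte_ring_enqueue(tx_ring, pkt) != 0) {
            rte_pktmbuf_free(pkt);
            return -1;
        }
        return 0;
    }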
|
/dpdk/drivers/compress/zlib/
  zlib_pmd.c
     215  return rte_ring_enqueue(qp->processed_pkts, (void *)op);  in process_zlib_op()
|
/dpdk/drivers/compress/octeontx/
  otx_zip_pmd.c
     511  ret = rte_ring_enqueue(qp->processed_pkts, (void *)op);  in zip_pmd_enqueue_burst_sync()
|
/dpdk/drivers/net/af_xdp/
  rte_eth_af_xdp.c
     399  rte_ring_enqueue(umem->buf_ring, (void *)addr);  in af_xdp_rx_cp()
     469  rte_ring_enqueue(umem->buf_ring, (void *)addr);  in pull_umem_cq()
    1142  rte_ring_enqueue(umem->buf_ring,                 in xdp_umem_configure()
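The af_xdp call sites above recycle raw umem buffer addresses through a ring by casting the 64-bit address to a pointer-sized value; the ring only stores opaque void * slots and never dereferences them. A small illustrative sketch of that pattern (the helper name is not the driver's):

    #include <stdint.h>
    #include <rte_ring.h>

    /* Recycle a raw umem buffer address: it is stored in the ring as an
     * opaque void * and is never dereferenced. */
    static inline void
    recycle_umem_addr(struct rte_ring *buf_ring, uint64_t addr)
    {
        (void)rte_ring_enqueue(buf_ring, (void *)(uintptr_t)addr);
    }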
|
/dpdk/drivers/crypto/openssl/
  rte_openssl_pmd.c
    2029  retval = rte_ring_enqueue(qp->processed_ops, (void *)op);  in process_asym_op()
    2116  retval = rte_ring_enqueue(qp->processed_ops, (void *)op);  in process_op()
|
/dpdk/drivers/net/pcap/
  pcap_ethdev.c
     263  rte_ring_enqueue(pcap_q->pkts, pcap_buf);  in eth_pcap_rx_infinite()
|
/dpdk/lib/eventdev/
  rte_event_eth_rx_adapter.c
    1095  err = rte_ring_enqueue(rx_adapter->intr_ring, data);  in rxa_intr_ring_enqueue()
    1147  rte_ring_enqueue(rx_adapter->intr_ring, qd.ptr);      in rxa_intr_ring_del_entries()
|
/dpdk/drivers/raw/dpaa2_qdma/
  dpaa2_qdma.c
    1455  rte_ring_enqueue(temp_qdma_vq->status_ring,  in dpaa2_qdma_dequeue()
|
/dpdk/drivers/crypto/ccp/
  ccp_crypto.c
    2845  rte_ring_enqueue(qp->processed_pkts, (void *)b_info);  in process_ops_to_enqueue()
|