Lines matching refs:queue_id (each entry: source line number, matched text, and the enclosing function where shown)
3816 rte_eth_tx_done_cleanup(uint16_t port_id, uint16_t queue_id, uint32_t free_cnt);
3947 int rte_eth_dev_rx_intr_enable(uint16_t port_id, uint16_t queue_id);
3969 int rte_eth_dev_rx_intr_disable(uint16_t port_id, uint16_t queue_id);
4011 int rte_eth_dev_rx_intr_ctl_q(uint16_t port_id, uint16_t queue_id,
4029 rte_eth_dev_rx_intr_ctl_q_get_fd(uint16_t port_id, uint16_t queue_id);
4533 rte_eth_add_rx_callback(uint16_t port_id, uint16_t queue_id,
4563 rte_eth_add_first_rx_callback(uint16_t port_id, uint16_t queue_id,
4592 rte_eth_add_tx_callback(uint16_t port_id, uint16_t queue_id,
4628 int rte_eth_remove_rx_callback(uint16_t port_id, uint16_t queue_id,
4664 int rte_eth_remove_tx_callback(uint16_t port_id, uint16_t queue_id,
4686 int rte_eth_rx_queue_info_get(uint16_t port_id, uint16_t queue_id,
4708 int rte_eth_tx_queue_info_get(uint16_t port_id, uint16_t queue_id,
4729 int rte_eth_rx_burst_mode_get(uint16_t port_id, uint16_t queue_id,
4750 int rte_eth_tx_burst_mode_get(uint16_t port_id, uint16_t queue_id,
4774 int rte_eth_get_monitor_addr(uint16_t port_id, uint16_t queue_id,
5480 uint16_t rte_eth_call_rx_callbacks(uint16_t port_id, uint16_t queue_id,
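
The declarations above are the per-queue control-path entry points: each one addresses a single queue through the (port_id, queue_id) pair. As one example, rte_eth_add_rx_callback()/rte_eth_remove_rx_callback() attach and detach a hook on exactly one RX queue. A minimal sketch follows; the helper names and the packet-counting body are illustrative, not taken from this listing.

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    /* Illustrative per-queue RX hook: count packets seen on one RX queue. */
    static uint64_t rx_seen;

    static uint16_t
    count_rx_cb(uint16_t port_id, uint16_t queue_id, struct rte_mbuf *pkts[],
                uint16_t nb_pkts, uint16_t max_pkts, void *user_param)
    {
        (void)port_id; (void)queue_id; (void)pkts;
        (void)max_pkts; (void)user_param;
        rx_seen += nb_pkts;
        return nb_pkts;            /* pass the burst through unchanged */
    }

    static const struct rte_eth_rxtx_callback *rx_cb_handle;

    static int
    attach_rx_counter(uint16_t port_id, uint16_t queue_id)
    {
        /* The hook runs inside rte_eth_rx_burst() for this queue only. */
        rx_cb_handle = rte_eth_add_rx_callback(port_id, queue_id,
                                               count_rx_cb, NULL);
        return rx_cb_handle == NULL ? -1 : 0;
    }

    static int
    detach_rx_counter(uint16_t port_id, uint16_t queue_id)
    {
        return rte_eth_remove_rx_callback(port_id, queue_id, rx_cb_handle);
    }
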
5572 rte_eth_rx_burst(uint16_t port_id, uint16_t queue_id, in rte_eth_rx_burst() argument
5581 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_rx_burst()
5584 port_id, queue_id); in rte_eth_rx_burst()
5591 qd = p->rxq.data[queue_id]; in rte_eth_rx_burst()
5598 queue_id, port_id); in rte_eth_rx_burst()
5615 cb = __atomic_load_n((void **)&p->rxq.clbk[queue_id], in rte_eth_rx_burst()
5618 nb_rx = rte_eth_call_rx_callbacks(port_id, queue_id, in rte_eth_rx_burst()
5623 rte_ethdev_trace_rx_burst(port_id, queue_id, (void **)rx_pkts, nb_rx); in rte_eth_rx_burst()
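
The rte_eth_rx_burst() fragments above show the fast-path shape: a bounds check of queue_id against RTE_MAX_QUEUES_PER_PORT, a lookup of the per-queue data, an optional pass through registered RX callbacks, and a trace point. A minimal polling loop over one queue, assuming the port and queue are already configured and started (the burst size and the processing step are placeholders):

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    #define BURST_SIZE 32   /* example burst size */

    /* Poll one RX queue once and free every packet (stand-in for real work). */
    static void
    poll_rx_queue(uint16_t port_id, uint16_t queue_id)
    {
        struct rte_mbuf *bufs[BURST_SIZE];
        uint16_t nb_rx = rte_eth_rx_burst(port_id, queue_id, bufs, BURST_SIZE);

        for (uint16_t i = 0; i < nb_rx; i++) {
            /* ... process bufs[i] ... */
            rte_pktmbuf_free(bufs[i]);
        }
    }
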
5641 rte_eth_rx_queue_count(uint16_t port_id, uint16_t queue_id) in rte_eth_rx_queue_count() argument
5647 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_rx_queue_count()
5650 port_id, queue_id); in rte_eth_rx_queue_count()
5656 qd = p->rxq.data[queue_id]; in rte_eth_rx_queue_count()
5708 rte_eth_rx_descriptor_status(uint16_t port_id, uint16_t queue_id, in rte_eth_rx_descriptor_status() argument
5716 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_rx_descriptor_status()
5719 port_id, queue_id); in rte_eth_rx_descriptor_status()
5726 qd = p->rxq.data[queue_id]; in rte_eth_rx_descriptor_status()
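
rte_eth_rx_queue_count() and rte_eth_rx_descriptor_status() take the same port/queue pair and run the same queue_id bound check before touching the per-queue data. A rough RX backlog probe built on them; the helper names and the 3/4 threshold are arbitrary examples, not anything from the header:

    #include <rte_ethdev.h>

    /* Return non-zero if an RX queue looks backed up; the threshold is an
     * arbitrary example, and a negative count (unsupported, bad queue) is
     * treated as "not backed up". */
    static int
    rx_queue_backed_up(uint16_t port_id, uint16_t queue_id, uint16_t ring_size)
    {
        int used = rte_eth_rx_queue_count(port_id, queue_id);

        if (used < 0)
            return 0;
        return used > (ring_size * 3) / 4;
    }

    /* Check whether the RX descriptor at 'offset' has been filled by hardware. */
    static int
    rx_desc_done(uint16_t port_id, uint16_t queue_id, uint16_t offset)
    {
        return rte_eth_rx_descriptor_status(port_id, queue_id, offset)
               == RTE_ETH_RX_DESC_DONE;
    }
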
5779 uint16_t queue_id, uint16_t offset) in rte_eth_tx_descriptor_status() argument
5786 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_tx_descriptor_status()
5789 port_id, queue_id); in rte_eth_tx_descriptor_status()
5796 qd = p->txq.data[queue_id]; in rte_eth_tx_descriptor_status()
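
The TX-side counterpart, rte_eth_tx_descriptor_status(), follows the same pattern, and rte_eth_tx_done_cleanup() from the top of the listing reclaims already-transmitted mbufs on one queue. A hedged sketch; the helper names and the free count of 64 are made up for illustration:

    #include <rte_ethdev.h>

    /* Has the TX descriptor at 'offset' been processed by the hardware? */
    static int
    tx_desc_done(uint16_t port_id, uint16_t queue_id, uint16_t offset)
    {
        return rte_eth_tx_descriptor_status(port_id, queue_id, offset)
               == RTE_ETH_TX_DESC_DONE;
    }

    /* Ask the driver to free up to 64 already-sent mbufs on this queue
     * (0 would mean "as many as possible"); returns the count freed or a
     * negative error. */
    static int
    reclaim_tx_mbufs(uint16_t port_id, uint16_t queue_id)
    {
        return rte_eth_tx_done_cleanup(port_id, queue_id, 64);
    }
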
5826 uint16_t rte_eth_call_tx_callbacks(uint16_t port_id, uint16_t queue_id,
5896 rte_eth_tx_burst(uint16_t port_id, uint16_t queue_id, in rte_eth_tx_burst() argument
5904 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_tx_burst()
5907 port_id, queue_id); in rte_eth_tx_burst()
5914 qd = p->txq.data[queue_id]; in rte_eth_tx_burst()
5921 queue_id, port_id); in rte_eth_tx_burst()
5936 cb = __atomic_load_n((void **)&p->txq.clbk[queue_id], in rte_eth_tx_burst()
5939 nb_pkts = rte_eth_call_tx_callbacks(port_id, queue_id, in rte_eth_tx_burst()
5946 rte_ethdev_trace_tx_burst(port_id, queue_id, (void **)tx_pkts, nb_pkts); in rte_eth_tx_burst()
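
rte_eth_tx_burst() mirrors the RX fast path: the same queue_id bound check, per-queue data lookup, optional TX callbacks, and trace point. The driver may accept fewer packets than offered, so callers usually free what was not taken; a minimal sketch (the helper name is illustrative):

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    /* Send a burst on one TX queue; free whatever the driver did not accept. */
    static uint16_t
    send_burst(uint16_t port_id, uint16_t queue_id,
               struct rte_mbuf **pkts, uint16_t nb_pkts)
    {
        uint16_t nb_tx = rte_eth_tx_burst(port_id, queue_id, pkts, nb_pkts);

        for (uint16_t i = nb_tx; i < nb_pkts; i++)
            rte_pktmbuf_free(pkts[i]);

        return nb_tx;
    }
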
6007 rte_eth_tx_prepare(uint16_t port_id, uint16_t queue_id, in rte_eth_tx_prepare() argument
6015 queue_id >= RTE_MAX_QUEUES_PER_PORT) { in rte_eth_tx_prepare()
6018 port_id, queue_id); in rte_eth_tx_prepare()
6026 qd = p->txq.data[queue_id]; in rte_eth_tx_prepare()
6036 queue_id, port_id); in rte_eth_tx_prepare()
6061 __rte_unused uint16_t queue_id, in rte_eth_tx_prepare() argument
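
rte_eth_tx_prepare() takes the same port/queue pair; the second fragment (6061) is the no-op variant in which queue_id is deliberately unused. A common pattern is to run it on a burst before rte_eth_tx_burst() so offload metadata gets validated and fixed up; a sketch under that assumption (the helper name and the drop-the-rest policy are illustrative):

    #include <rte_ethdev.h>
    #include <rte_mbuf.h>

    /* Validate offload metadata for a burst, then transmit what passed. */
    static uint16_t
    prepare_and_send(uint16_t port_id, uint16_t queue_id,
                     struct rte_mbuf **pkts, uint16_t nb_pkts)
    {
        uint16_t nb_prep = rte_eth_tx_prepare(port_id, queue_id, pkts, nb_pkts);

        if (nb_prep != nb_pkts) {
            /* pkts[nb_prep] is the first packet that failed validation
             * (rte_errno says why); this sketch simply drops it and the
             * rest of the burst. */
            for (uint16_t i = nb_prep; i < nb_pkts; i++)
                rte_pktmbuf_free(pkts[i]);
        }
        return rte_eth_tx_burst(port_id, queue_id, pkts, nb_prep);
    }
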
6092 rte_eth_tx_buffer_flush(uint16_t port_id, uint16_t queue_id, in rte_eth_tx_buffer_flush() argument
6101 sent = rte_eth_tx_burst(port_id, queue_id, buffer->pkts, to_send); in rte_eth_tx_buffer_flush()
6145 rte_eth_tx_buffer(uint16_t port_id, uint16_t queue_id, in rte_eth_tx_buffer() argument
6152 return rte_eth_tx_buffer_flush(port_id, queue_id, buffer); in rte_eth_tx_buffer()
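
rte_eth_tx_buffer() stages one packet in a software buffer tied to a single (port, queue) pair and, as the fragment at 6152 shows, hands off to rte_eth_tx_buffer_flush() once the buffer fills. A minimal setup sketch; the buffer capacity of 32 and the helper names are arbitrary examples:

    #include <rte_ethdev.h>
    #include <rte_malloc.h>
    #include <rte_mbuf.h>

    #define TX_BUF_PKTS 32   /* example buffer capacity */

    /* Allocate and initialize a TX buffer sized for TX_BUF_PKTS packets. */
    static struct rte_eth_dev_tx_buffer *
    make_tx_buffer(void)
    {
        struct rte_eth_dev_tx_buffer *buf =
            rte_zmalloc(NULL, RTE_ETH_TX_BUFFER_SIZE(TX_BUF_PKTS), 0);

        if (buf != NULL)
            rte_eth_tx_buffer_init(buf, TX_BUF_PKTS);
        return buf;
    }

    /* Stage one packet; the return value is how many packets were actually
     * transmitted (0 until an internal flush happens). */
    static uint16_t
    buffered_send(uint16_t port_id, uint16_t queue_id,
                  struct rte_eth_dev_tx_buffer *buf, struct rte_mbuf *pkt)
    {
        return rte_eth_tx_buffer(port_id, queue_id, buf, pkt);
    }

    /* Push out anything still sitting in the buffer, e.g. at end of a loop. */
    static uint16_t
    drain_tx_buffer(uint16_t port_id, uint16_t queue_id,
                    struct rte_eth_dev_tx_buffer *buf)
    {
        return rte_eth_tx_buffer_flush(port_id, queue_id, buf);
    }
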