Lines Matching refs:cdata

79 if (!cdata.quiet) in worker_do_tx_single()
114 if (!cdata.quiet) in worker_do_tx_single_atq()
160 if (!cdata.quiet) in worker_do_tx_single_burst()
204 if (!cdata.quiet) in worker_do_tx_single_burst_atq()
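
The four single-stage workers above differ only in queue style (dedicated queue vs. all-type queue) and scalar vs. burst dequeue; each guards its end-of-run statistics print behind cdata.quiet. A minimal sketch of that shared shape, with stand-in globals for the example's cdata and shutdown flag (not the file's exact code):

#include <stdio.h>
#include <stdbool.h>
#include <rte_eventdev.h>
#include <rte_lcore.h>

/* Hypothetical stand-ins for the example's globals. */
static struct { int quiet; } cdata;
static volatile bool done;

static int
worker_single_stage(uint8_t dev_id, uint8_t port_id)
{
	size_t received = 0;

	while (!done) {
		struct rte_event ev;

		if (!rte_event_dequeue_burst(dev_id, port_id, &ev, 1, 0))
			continue;
		/* ... process the mbuf, then hand it to the TX path ... */
		received++;
	}
	if (!cdata.quiet)
		printf("  worker %u done, received %zu events\n",
		       rte_lcore_id(), received);
	return 0;
}
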
220 const uint8_t lst_qid = cdata.num_stages - 1; in worker_do_tx()
232 const uint8_t cq_id = ev.queue_id % cdata.num_stages; in worker_do_tx()
243 cdata.next_qid[ev.queue_id] : ev.queue_id; in worker_do_tx()
245 ev.queue_id = cdata.next_qid[ev.queue_id]; in worker_do_tx()
246 worker_fwd_event(&ev, cdata.queue_type); in worker_do_tx()
254 if (!cdata.quiet) in worker_do_tx()
269 const uint8_t lst_qid = cdata.num_stages - 1; in worker_do_tx_atq()
280 const uint8_t cq_id = ev.sub_event_type % cdata.num_stages; in worker_do_tx_atq()
292 worker_fwd_event(&ev, cdata.queue_type); in worker_do_tx_atq()
300 if (!cdata.quiet) in worker_do_tx_atq()
315 uint8_t lst_qid = cdata.num_stages - 1; in worker_do_tx_burst()
330 const uint8_t cq_id = ev[i].queue_id % cdata.num_stages; in worker_do_tx_burst()
340 cdata.next_qid[ev[i].queue_id] : in worker_do_tx_burst()
345 ev[i].queue_id = cdata.next_qid[ev[i].queue_id]; in worker_do_tx_burst()
346 worker_fwd_event(&ev[i], cdata.queue_type); in worker_do_tx_burst()
355 if (!cdata.quiet) in worker_do_tx_burst()
370 uint8_t lst_qid = cdata.num_stages - 1; in worker_do_tx_burst_atq()
387 cdata.num_stages; in worker_do_tx_burst_atq()
400 worker_fwd_event(&ev[i], cdata.queue_type); in worker_do_tx_burst_atq()
409 if (!cdata.quiet) in worker_do_tx_burst_atq()
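
The multi-stage workers listed above all make the same per-event decision: compute the current stage, transmit if it is the last one, otherwise forward to the next queue. In worker_do_tx() the stage comes from ev.queue_id; the _atq variants recover it from ev.sub_event_type because every stage shares one all-type queue, and the _burst variants run the same check over each event of a dequeued array. A condensed sketch of that decision, with the cdata fields assumed to match the listing and the TX branch abbreviated:

#include <stdint.h>
#include <rte_eventdev.h>

/* Assumed subset of the example's config struct. */
static struct {
	uint8_t num_stages;
	uint8_t queue_type;
	uint8_t next_qid[256];
} cdata;

static void
forward_or_tx(struct rte_event *ev)
{
	const uint8_t lst_qid = cdata.num_stages - 1;
	const uint8_t cq_id = ev->queue_id % cdata.num_stages;

	if (cq_id == lst_qid) {
		/* Last pipeline stage: hand the event to the ethernet TX
		 * adapter (the TX queue is set on the mbuf via
		 * rte_event_eth_tx_adapter_txq_set()). */
		ev->op = RTE_EVENT_OP_FORWARD;
	} else {
		/* Intermediate stage: move to the next queue in the chain
		 * and keep the scheduling type chosen on the command line. */
		ev->queue_id = cdata.next_qid[ev->queue_id];
		ev->sched_type = cdata.queue_type;
		ev->op = RTE_EVENT_OP_FORWARD;
	}
}
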
420 const uint8_t atq = cdata.all_type_queues ? 1 : 0; in setup_eventdev_worker_tx_enq()
422 const uint8_t nb_ports = cdata.num_workers; in setup_eventdev_worker_tx_enq()
432 nb_queues *= cdata.num_stages; in setup_eventdev_worker_tx_enq()
446 .dequeue_depth = cdata.worker_cq_depth, in setup_eventdev_worker_tx_enq()
451 .schedule_type = cdata.queue_type, in setup_eventdev_worker_tx_enq()
491 nb_slots = cdata.num_stages; in setup_eventdev_worker_tx_enq()
497 nb_slots = cdata.num_stages + 1; in setup_eventdev_worker_tx_enq()
499 wkr_q_conf.schedule_type = slot == cdata.num_stages ? in setup_eventdev_worker_tx_enq()
500 RTE_SCHED_TYPE_ATOMIC : cdata.queue_type; in setup_eventdev_worker_tx_enq()
507 cdata.qid[i] = i; in setup_eventdev_worker_tx_enq()
508 cdata.next_qid[i] = i+1; in setup_eventdev_worker_tx_enq()
509 if (cdata.enable_queue_priorities) { in setup_eventdev_worker_tx_enq()
542 for (i = 0; i < cdata.num_workers; i++) { in setup_eventdev_worker_tx_enq()
573 cdata.rx_stride = atq ? 1 : nb_slots; in setup_eventdev_worker_tx_enq()
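
setup_eventdev_worker_tx_enq() derives the queue count from cdata.num_stages (plus one extra atomic slot per pipeline when dedicated queues are used), chains cdata.qid[i] to cdata.next_qid[i], and records cdata.rx_stride so the RX adapter can spread ports across pipelines. A condensed sketch of the per-queue loop under those assumptions (port setup and error handling omitted):

#include <rte_eventdev.h>

/* Assumed subset of the example's config struct. */
static struct {
	uint8_t num_stages;
	uint8_t queue_type;
	int qid[256];
	int next_qid[256];
	unsigned int rx_stride;
} cdata;

static void
setup_worker_queues(uint8_t evdev_id, unsigned int nb_queues,
		    unsigned int nb_slots, int atq,
		    struct rte_event_queue_conf *wkr_q_conf)
{
	unsigned int qid;

	for (qid = 0; qid < nb_queues; qid++) {
		unsigned int slot = qid % nb_slots;

		/* The slot after the last worker stage feeds the TX port
		 * and must be atomic; the rest use the scheduling type
		 * picked on the command line. */
		wkr_q_conf->schedule_type = (slot == cdata.num_stages) ?
			RTE_SCHED_TYPE_ATOMIC : cdata.queue_type;

		rte_event_queue_setup(evdev_id, qid, wkr_q_conf);

		cdata.qid[qid] = qid;
		cdata.next_qid[qid] = qid + 1;
	}

	/* One queue per pipeline stage chain, or per port with atq. */
	cdata.rx_stride = atq ? 1 : nb_slots;
}
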
716 if (!cdata.num_mbuf) in init_ports()
717 cdata.num_mbuf = 16384 * num_ports; in init_ports()
720 /* mbufs */ cdata.num_mbuf, in init_ports()
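
init_ports() only falls back to a default mempool size when the user did not supply one: 16384 mbufs per ethernet port, used to create a single shared pktmbuf pool. Roughly, with illustrative cache and buffer sizes:

#include <rte_mbuf.h>
#include <rte_mempool.h>
#include <rte_lcore.h>

/* Assumed subset of the example's config struct. */
static struct { unsigned int num_mbuf; } cdata;

static struct rte_mempool *
create_packet_pool(uint16_t num_ports)
{
	if (!cdata.num_mbuf)
		cdata.num_mbuf = 16384 * num_ports;

	return rte_pktmbuf_pool_create("packet_pool",
			cdata.num_mbuf,			/* total mbufs */
			256,				/* per-lcore cache */
			0,				/* priv size */
			RTE_MBUF_DEFAULT_BUF_SIZE,	/* data room */
			rte_socket_id());
}
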
747 .dequeue_depth = cdata.worker_cq_depth, in init_adapters()
764 queue_conf.ev.sched_type = cdata.queue_type; in init_adapters()
782 queue_conf.ev.queue_id = cdata.rx_stride ? in init_adapters()
783 (i * cdata.rx_stride) in init_adapters()
784 : (uint8_t)cdata.qid[0]; in init_adapters()
820 ret = rte_event_eth_tx_adapter_create(cdata.tx_adapter_id, evdev_id, in init_adapters()
824 cdata.tx_adapter_id); in init_adapters()
827 ret = rte_event_eth_tx_adapter_queue_add(cdata.tx_adapter_id, i, in init_adapters()
834 ret = rte_event_eth_tx_adapter_start(cdata.tx_adapter_id); in init_adapters()
837 cdata.tx_adapter_id); in init_adapters()
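
On the adapter side, init_adapters() maps each RX queue to event queue i * cdata.rx_stride (falling back to cdata.qid[0] when the stride is zero) and then brings up a single TX adapter: create it with the worker port config, add every TX queue of every port, and start it. A sketch of the TX-adapter half, assuming the adapter id and port config come from the surrounding setup:

#include <stdio.h>
#include <rte_ethdev.h>
#include <rte_event_eth_tx_adapter.h>

/* Assumed subset of the example's config struct. */
static struct { uint8_t tx_adapter_id; } cdata;

static int
setup_tx_adapter(uint8_t evdev_id, struct rte_event_port_conf *port_conf)
{
	uint16_t i;
	int ret;

	ret = rte_event_eth_tx_adapter_create(cdata.tx_adapter_id, evdev_id,
			port_conf);
	if (ret) {
		fprintf(stderr, "failed to create tx adapter[%u]\n",
				cdata.tx_adapter_id);
		return ret;
	}

	/* -1 adds every TX queue of the ethernet port to the adapter. */
	RTE_ETH_FOREACH_DEV(i) {
		ret = rte_event_eth_tx_adapter_queue_add(cdata.tx_adapter_id,
				i, -1);
		if (ret) {
			fprintf(stderr, "failed to add port %u queues\n", i);
			return ret;
		}
	}

	return rte_event_eth_tx_adapter_start(cdata.tx_adapter_id);
}
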
881 if (cdata.all_type_queues && !(eventdev_info.event_dev_cap & in worker_tx_enq_opt_check()
897 if (cdata.worker_lcore_mask == 0 || in worker_tx_enq_opt_check()
898 (rx_needed && cdata.rx_lcore_mask == 0) || in worker_tx_enq_opt_check()
899 (sched_needed && cdata.sched_lcore_mask == 0)) { in worker_tx_enq_opt_check()
904 "\n\tworkers: %"PRIu64"\n", cdata.rx_lcore_mask, in worker_tx_enq_opt_check()
905 cdata.sched_lcore_mask, cdata.worker_lcore_mask); in worker_tx_enq_opt_check()
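
worker_tx_enq_opt_check() rejects configurations the device cannot serve: requesting all-type queues needs the RTE_EVENT_DEV_CAP_QUEUE_ALL_TYPES capability, and every role not handled internally by the driver (RX adapter service, scheduler service) must still have cores in its lcore mask. A sketch of those two checks, with rx_needed and sched_needed standing in for the example's capability probing:

#include <stdbool.h>
#include <stdio.h>
#include <inttypes.h>
#include <rte_eventdev.h>

/* Assumed subset of the example's config struct. */
static struct {
	int all_type_queues;
	uint64_t rx_lcore_mask;
	uint64_t sched_lcore_mask;
	uint64_t worker_lcore_mask;
} cdata;

static int
check_worker_tx_enq_opts(uint8_t evdev_id, bool rx_needed, bool sched_needed)
{
	struct rte_event_dev_info info;

	rte_event_dev_info_get(evdev_id, &info);

	if (cdata.all_type_queues &&
	    !(info.event_dev_cap & RTE_EVENT_DEV_CAP_QUEUE_ALL_TYPES)) {
		fprintf(stderr, "eventdev lacks all-type queue support\n");
		return -1;
	}

	if (cdata.worker_lcore_mask == 0 ||
	    (rx_needed && cdata.rx_lcore_mask == 0) ||
	    (sched_needed && cdata.sched_lcore_mask == 0)) {
		fprintf(stderr, "core masks too small: rx %"PRIu64
			" sched %"PRIu64" workers %"PRIu64"\n",
			cdata.rx_lcore_mask, cdata.sched_lcore_mask,
			cdata.worker_lcore_mask);
		return -1;
	}
	return 0;
}
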
958 uint8_t atq = cdata.all_type_queues ? 1 : 0; in get_worker_single_stage()
969 uint8_t atq = cdata.all_type_queues ? 1 : 0; in get_worker_multi_stage()
980 if (cdata.num_stages == 1) in set_worker_tx_enq_setup_data()
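
Finally, set_worker_tx_enq_setup_data() picks which of the loops above each worker runs: the single-stage family when cdata.num_stages == 1, the multi-stage family otherwise, with get_worker_single_stage() and get_worker_multi_stage() further selecting the all-type-queue and burst variants. A hypothetical dispatch showing just the stage/atq split (the real helpers also consider burst mode):

#include <stdint.h>

/* Worker loops from the listing above; signatures assumed. */
int worker_do_tx_single(void *arg);
int worker_do_tx_single_atq(void *arg);
int worker_do_tx(void *arg);
int worker_do_tx_atq(void *arg);

/* Assumed subset of the example's config struct. */
static struct {
	int all_type_queues;
	uint8_t num_stages;
} cdata;

typedef int (*worker_loop_t)(void *arg);

static worker_loop_t
pick_worker_loop(void)
{
	const uint8_t atq = cdata.all_type_queues ? 1 : 0;

	if (cdata.num_stages == 1)
		return atq ? worker_do_tx_single_atq : worker_do_tx_single;

	return atq ? worker_do_tx_atq : worker_do_tx;
}
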