Lines matching refs: opt (DPDK app/test-eventdev/test_perf_common.c)
8 perf_test_result(struct evt_test *test, struct evt_options *opt) in perf_test_result() argument
10 RTE_SET_USED(opt); in perf_test_result()
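
RTE_SET_USED() is DPDK's conventional way to silence unused-parameter warnings in functions that must match a fixed prototype; it is just a void cast (from rte_common.h):

    #define RTE_SET_USED(x) (void)(x)
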
34 struct evt_options *opt = t->opt; in perf_producer() local
45 if (opt->verbose_level > 1) in perf_producer()
52 ev.sched_type = t->opt->sched_type_list[0]; in perf_producer()
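
A minimal sketch of the synthetic-producer loop these fragments come from: each event takes its scheduling type from the first entry of opt->sched_type_list, carries a mempool element as payload, and is retried until the device accepts it. The parameter names are stand-ins for the t->... and opt->... fields above, not the verbatim source:

    #include <rte_eventdev.h>
    #include <rte_mempool.h>

    static void
    producer_loop_sketch(uint8_t dev_id, uint8_t port, struct rte_mempool *pool,
                         uint8_t sched_type, uint64_t nb_pkts)
    {
            struct rte_event ev = {
                    .op = RTE_EVENT_OP_NEW,
                    .queue_id = 0,
                    .sched_type = sched_type,   /* opt->sched_type_list[0] */
            };
            uint64_t count = 0;
            void *m;

            while (count < nb_pkts) {
                    if (rte_mempool_get(pool, &m) < 0)
                            continue;           /* pool temporarily empty */
                    ev.event_ptr = m;
                    while (rte_event_enqueue_burst(dev_id, port, &ev, 1) != 1)
                            ;                   /* retry until accepted */
                    count++;
            }
    }
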
84 struct evt_options *opt = t->opt; in perf_event_timer_producer() local
88 const uint8_t nb_timer_adptrs = opt->nb_timer_adptrs; in perf_event_timer_producer()
90 const uint64_t nb_timers = opt->nb_timers; in perf_event_timer_producer()
95 uint64_t timeout_ticks = opt->expiry_nsec / opt->timer_tick_nsec; in perf_event_timer_producer()
98 timeout_ticks = opt->optm_timer_tick_nsec ? in perf_event_timer_producer()
99 (timeout_ticks * opt->timer_tick_nsec) in perf_event_timer_producer()
100 / opt->optm_timer_tick_nsec : timeout_ticks; in perf_event_timer_producer()
104 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer()
110 if (opt->verbose_level > 1) in perf_event_timer_producer()
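
The two-step tick computation above is easier to follow with hypothetical numbers. The requested expiry is first converted to ticks of the requested resolution, then rescaled if the adapter granted a coarser tick (opt->optm_timer_tick_nsec, captured from the adapter in perf_event_timer_adapter_setup() further down):

    expiry_nsec = 10,000,000 (10 ms), timer_tick_nsec = 1,000,000 (1 ms)
        -> timeout_ticks = 10,000,000 / 1,000,000 = 10 ticks
    adapter grants optm_timer_tick_nsec = 2,000,000 (2 ms)
        -> timeout_ticks = (10 * 1,000,000) / 2,000,000 = 5 ticks

Either way the timer fires about 10 ms out; only the tick unit changes.
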
148 struct evt_options *opt = t->opt; in perf_event_timer_producer_burst() local
152 const uint8_t nb_timer_adptrs = opt->nb_timer_adptrs; in perf_event_timer_producer_burst()
154 const uint64_t nb_timers = opt->nb_timers; in perf_event_timer_producer_burst()
159 uint64_t timeout_ticks = opt->expiry_nsec / opt->timer_tick_nsec; in perf_event_timer_producer_burst()
162 timeout_ticks = opt->optm_timer_tick_nsec ? in perf_event_timer_producer_burst()
163 (timeout_ticks * opt->timer_tick_nsec) in perf_event_timer_producer_burst()
164 / opt->optm_timer_tick_nsec : timeout_ticks; in perf_event_timer_producer_burst()
168 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer_burst()
174 if (opt->verbose_level > 1) in perf_event_timer_producer_burst()
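
The burst variant differs mainly in how it arms: many timers sharing one timeout go out in a single call. A hedged sketch of just the arming step, assuming the rte_event_timer array has already been initialized with the tim.ev fields shown above:

    #include <rte_errno.h>
    #include <rte_event_timer_adapter.h>

    static int
    arm_burst_sketch(const struct rte_event_timer_adapter *adptr,
                     struct rte_event_timer **tims, uint16_t nb,
                     uint64_t timeout_ticks)
    {
            /* arms up to nb timers, all expiring timeout_ticks from now */
            uint16_t armed = rte_event_timer_arm_tmo_tick_burst(adptr, tims,
                                                                timeout_ticks, nb);
            if (armed < nb)
                    return -rte_errno;  /* e.g. adapter temporarily full */
            return 0;
    }
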
210 if (t->opt->prod_type == EVT_PROD_TYPE_SYNT) in perf_producer_wrapper()
212 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
213 !t->opt->timdev_use_burst) in perf_producer_wrapper()
215 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
216 t->opt->timdev_use_burst) in perf_producer_wrapper()
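
Reconstructed shape of the wrapper: a single lcore entry point that selects the producer variant from the options (consistent with the fragments above; prod_data and test_perf are the test harness's own context types):

    static int
    perf_producer_wrapper_sketch(void *arg)
    {
            struct prod_data *p = arg;
            struct test_perf *t = p->t;

            if (t->opt->prod_type == EVT_PROD_TYPE_SYNT)
                    return perf_producer(arg);
            else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR &&
                     !t->opt->timdev_use_burst)
                    return perf_event_timer_producer(arg);
            else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR &&
                     t->opt->timdev_use_burst)
                    return perf_event_timer_producer_burst(arg);
            return 0;
    }
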
249 perf_launch_lcores(struct evt_test *test, struct evt_options *opt, in perf_launch_lcores() argument
258 if (!(opt->wlcores[lcore_id])) in perf_launch_lcores()
272 if (!(opt->plcores[lcore_id])) in perf_launch_lcores()
314 if (opt->fwd_latency && pkts > 0) { in perf_launch_lcores()
326 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_launch_lcores()
327 opt->prod_type == in perf_launch_lcores()
337 (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_launch_lcores()
338 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR)) { in perf_launch_lcores()
341 rte_event_dev_dump(opt->dev_id, stdout); in perf_launch_lcores()
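
Launch order in perf_launch_lcores(): workers start first on every lcore set in opt->wlcores, then producers on opt->plcores; the main lcore then polls progress, printing a running latency average when opt->fwd_latency is set and dumping the event device on a stall. A trimmed sketch of the launch phase, with bool masks standing in for the evt_options lcore arrays:

    #include <stdbool.h>
    #include <rte_launch.h>
    #include <rte_lcore.h>

    static void
    launch_sketch(const bool *wlcores, const bool *plcores,
                  lcore_function_t *worker_fn, lcore_function_t *producer_fn,
                  void *arg)
    {
            unsigned int lcore_id;

            RTE_LCORE_FOREACH_WORKER(lcore_id)      /* workers first */
                    if (wlcores[lcore_id])
                            rte_eal_remote_launch(worker_fn, arg, lcore_id);

            RTE_LCORE_FOREACH_WORKER(lcore_id)      /* then producers */
                    if (plcores[lcore_id])
                            rte_eal_remote_launch(producer_fn, arg, lcore_id);
    }
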
356 perf_event_rx_adapter_setup(struct evt_options *opt, uint8_t stride, in perf_event_rx_adapter_setup() argument
365 queue_conf.ev.sched_type = opt->sched_type_list[0]; in perf_event_rx_adapter_setup()
369 ret = rte_event_eth_rx_adapter_caps_get(opt->dev_id, in perf_event_rx_adapter_setup()
374 opt->dev_id); in perf_event_rx_adapter_setup()
378 ret = rte_event_eth_rx_adapter_create(prod, opt->dev_id, in perf_event_rx_adapter_setup()
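
The Rx-adapter bring-up traced by these fragments follows the standard sequence: query the caps for the (event device, eth port) pair, create the adapter with the same port conf used for event ports, add the port's Rx queues, then start it. A minimal hedged sketch (adapter id reuses the eth port id, as the fragments suggest):

    #include <rte_event_eth_rx_adapter.h>

    static int
    rx_adapter_sketch(uint8_t dev_id, uint16_t eth_port, uint8_t ev_queue,
                      uint8_t sched_type, struct rte_event_port_conf *pconf)
    {
            struct rte_event_eth_rx_adapter_queue_conf qconf = {
                    .ev.queue_id = ev_queue,
                    .ev.sched_type = sched_type,  /* opt->sched_type_list[0] */
            };
            uint32_t caps;
            int ret;

            /* caps tell whether the PMD has an internal port or needs
             * a service core to move packets into the event device */
            ret = rte_event_eth_rx_adapter_caps_get(dev_id, eth_port, &caps);
            if (ret)
                    return ret;
            ret = rte_event_eth_rx_adapter_create(eth_port, dev_id, pconf);
            if (ret)
                    return ret;
            /* rx_queue_id of -1 adds every Rx queue of the port */
            ret = rte_event_eth_rx_adapter_queue_add(eth_port, eth_port, -1,
                                                     &qconf);
            if (ret)
                    return ret;
            return rte_event_eth_rx_adapter_start(eth_port);
    }
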
415 uint8_t nb_producers = evt_nr_active_lcores(t->opt->plcores); in perf_event_timer_adapter_setup()
421 for (i = 0; i < t->opt->nb_timer_adptrs; i++) { in perf_event_timer_adapter_setup()
423 .event_dev_id = t->opt->dev_id, in perf_event_timer_adapter_setup()
425 .timer_tick_ns = t->opt->timer_tick_nsec, in perf_event_timer_adapter_setup()
426 .max_tmo_ns = t->opt->max_tmo_nsec, in perf_event_timer_adapter_setup()
427 .nb_timers = t->opt->pool_sz, in perf_event_timer_adapter_setup()
440 t->opt->optm_timer_tick_nsec = adapter_info.min_resolution_ns; in perf_event_timer_adapter_setup()
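
The timer adapter is configured straight from the options, and because the device may not grant the exact tick requested, the test reads the achieved resolution back and stores it as opt->optm_timer_tick_nsec for the producers' rescale shown earlier. A hedged sketch of create/query/start:

    #include <rte_event_timer_adapter.h>

    static struct rte_event_timer_adapter *
    timer_adapter_sketch(uint8_t dev_id, uint16_t adptr_id, uint64_t tick_ns,
                         uint64_t max_tmo_ns, uint64_t nb_timers,
                         uint64_t *achieved_tick_ns)
    {
            struct rte_event_timer_adapter_conf conf = {
                    .event_dev_id = dev_id,
                    .timer_adapter_id = adptr_id,
                    .timer_tick_ns = tick_ns,
                    .max_tmo_ns = max_tmo_ns,
                    .nb_timers = nb_timers,
                    /* let the driver round the tick to what it supports */
                    .flags = RTE_EVENT_TIMER_ADAPTER_F_ADJUST_RES,
            };
            struct rte_event_timer_adapter_info info;
            struct rte_event_timer_adapter *adptr;

            adptr = rte_event_timer_adapter_create(&conf);
            if (adptr == NULL)
                    return NULL;
            rte_event_timer_adapter_get_info(adptr, &info);
            *achieved_tick_ns = info.min_resolution_ns;
            if (rte_event_timer_adapter_start(adptr) < 0)
                    return NULL;
            return adptr;
    }
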
462 perf_event_dev_port_setup(struct evt_test *test, struct evt_options *opt, in perf_event_dev_port_setup() argument
471 for (port = 0; port < evt_nr_active_lcores(opt->wlcores); in perf_event_dev_port_setup()
475 w->dev_id = opt->dev_id; in perf_event_dev_port_setup()
481 ret = rte_event_port_setup(opt->dev_id, port, port_conf); in perf_event_dev_port_setup()
487 ret = rte_event_port_link(opt->dev_id, port, NULL, NULL, 0); in perf_event_dev_port_setup()
495 if (opt->prod_type == EVT_PROD_TYPE_ETH_RX_ADPTR) { in perf_event_dev_port_setup()
496 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
501 ret = perf_event_rx_adapter_setup(opt, stride, *port_conf); in perf_event_dev_port_setup()
504 } else if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_event_dev_port_setup()
506 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
518 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
521 p->dev_id = opt->dev_id; in perf_event_dev_port_setup()
526 ret = rte_event_port_setup(opt->dev_id, port, in perf_event_dev_port_setup()
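
Port layout: the first evt_nr_active_lcores(opt->wlcores) ports are worker ports, each linked to every event queue (a NULL queue list with count 0 links them all); the remaining ports are producer ports whose setup branches on prod_type, as the fragments above show. The worker half as a minimal sketch:

    #include <rte_eventdev.h>

    static int
    worker_ports_sketch(uint8_t dev_id, uint8_t nb_workers,
                        const struct rte_event_port_conf *pconf)
    {
            uint8_t port;
            int ret;

            for (port = 0; port < nb_workers; port++) {
                    ret = rte_event_port_setup(dev_id, port, pconf);
                    if (ret)
                            return ret;
                    /* NULL, NULL, 0 == link this port to all queues */
                    ret = rte_event_port_link(dev_id, port, NULL, NULL, 0);
                    if (ret < 0)    /* returns the number of links on success */
                            return ret;
            }
            return 0;
    }
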
540 perf_opt_check(struct evt_options *opt, uint64_t nb_queues) in perf_opt_check() argument
547 lcores = opt->prod_type == EVT_PROD_TYPE_SYNT ? 3 : 2; in perf_opt_check()
555 if (evt_lcores_has_overlap(opt->wlcores, rte_get_main_lcore())) { in perf_opt_check()
559 if (evt_lcores_has_overlap_multi(opt->wlcores, opt->plcores)) { in perf_opt_check()
563 if (evt_has_disabled_lcore(opt->wlcores)) { in perf_opt_check()
567 if (!evt_has_active_lcore(opt->wlcores)) { in perf_opt_check()
572 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_opt_check()
573 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_opt_check()
575 if (evt_lcores_has_overlap(opt->plcores, in perf_opt_check()
580 if (evt_has_disabled_lcore(opt->plcores)) { in perf_opt_check()
584 if (!evt_has_active_lcore(opt->plcores)) { in perf_opt_check()
590 if (evt_has_invalid_stage(opt)) in perf_opt_check()
593 if (evt_has_invalid_sched_type(opt)) in perf_opt_check()
600 if (perf_nb_event_ports(opt) > EVT_MAX_PORTS) { in perf_opt_check()
606 if ((opt->nb_stages == 1 && in perf_opt_check()
607 opt->prod_type != EVT_PROD_TYPE_EVENT_TIMER_ADPTR) && in perf_opt_check()
608 opt->fwd_latency) { in perf_opt_check()
610 opt->fwd_latency = 0; in perf_opt_check()
613 if (opt->fwd_latency && !opt->q_priority) { in perf_opt_check()
615 opt->q_priority = 1; in perf_opt_check()
617 if (opt->nb_pkts == 0) in perf_opt_check()
618 opt->nb_pkts = INT64_MAX/evt_nr_active_lcores(opt->plcores); in perf_opt_check()
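
The lcore floor at the top of perf_opt_check() (3 for EVT_PROD_TYPE_SYNT, otherwise 2) counts who needs a core: the main lcore plus at least one worker are always required, and a synthetic producer additionally needs a producer lcore of its own. The later checks reject masks that overlap the main lcore, overlap each other, or name disabled or absent lcores. The nb_pkts == 0 fallback divides INT64_MAX across the active producers so that the total outstanding count computed later in perf_test_setup() (nb_pkts times the producer count) cannot overflow a signed 64-bit counter.
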
624 perf_opt_dump(struct evt_options *opt, uint8_t nb_queues) in perf_opt_dump() argument
626 evt_dump("nb_prod_lcores", "%d", evt_nr_active_lcores(opt->plcores)); in perf_opt_dump()
627 evt_dump_producer_lcores(opt); in perf_opt_dump()
628 evt_dump("nb_worker_lcores", "%d", evt_nr_active_lcores(opt->wlcores)); in perf_opt_dump()
629 evt_dump_worker_lcores(opt); in perf_opt_dump()
630 evt_dump_nb_stages(opt); in perf_opt_dump()
631 evt_dump("nb_evdev_ports", "%d", perf_nb_event_ports(opt)); in perf_opt_dump()
633 evt_dump_queue_priority(opt); in perf_opt_dump()
634 evt_dump_sched_type_list(opt); in perf_opt_dump()
635 evt_dump_producer_type(opt); in perf_opt_dump()
639 perf_eventdev_destroy(struct evt_test *test, struct evt_options *opt) in perf_eventdev_destroy() argument
644 if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_eventdev_destroy()
645 for (i = 0; i < opt->nb_timer_adptrs; i++) in perf_eventdev_destroy()
648 rte_event_dev_stop(opt->dev_id); in perf_eventdev_destroy()
649 rte_event_dev_close(opt->dev_id); in perf_eventdev_destroy()
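
Teardown mirrors setup: with timer-adapter producers the adapters are quiesced and released first, and only then is the event device stopped and closed. A hedged sketch of that ordering:

    #include <rte_event_timer_adapter.h>
    #include <rte_eventdev.h>

    static void
    eventdev_destroy_sketch(uint8_t dev_id,
                            struct rte_event_timer_adapter **adptrs,
                            uint8_t nb_adptrs)
    {
            uint8_t i;

            for (i = 0; i < nb_adptrs; i++) {
                    rte_event_timer_adapter_stop(adptrs[i]);
                    rte_event_timer_adapter_free(adptrs[i]);
            }
            rte_event_dev_stop(dev_id);
            rte_event_dev_close(dev_id);
    }
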
662 perf_ethdev_setup(struct evt_test *test, struct evt_options *opt) in perf_ethdev_setup() argument
681 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_ethdev_setup()
682 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) in perf_ethdev_setup()
742 void perf_ethdev_destroy(struct evt_test *test, struct evt_options *opt) in perf_ethdev_destroy() argument
747 if (opt->prod_type == EVT_PROD_TYPE_ETH_RX_ADPTR) { in perf_ethdev_destroy()
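
The ethdev teardown only has work to do when the Rx adapter produced the traffic; in that case each adapter is stopped before its eth port. A short sketch, assuming adapter id == eth port id as above:

    #include <rte_ethdev.h>
    #include <rte_event_eth_rx_adapter.h>

    static void
    ethdev_destroy_sketch(void)
    {
            uint16_t port;

            RTE_ETH_FOREACH_DEV(port) {
                    rte_event_eth_rx_adapter_stop(port);  /* adapter first */
                    rte_eth_dev_stop(port);
            }
    }
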
756 perf_mempool_setup(struct evt_test *test, struct evt_options *opt) in perf_mempool_setup() argument
760 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_mempool_setup()
761 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_mempool_setup()
763 opt->pool_sz, /* number of elements */ in perf_mempool_setup()
768 NULL, opt->socket_id, 0); /* flags */ in perf_mempool_setup()
771 opt->pool_sz, /* number of elements */ in perf_mempool_setup()
775 opt->socket_id); /* socket id */ in perf_mempool_setup()
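
Two pool flavours, selected by producer type: synthetic and timer producers circulate plain perf elements from rte_mempool_create(), while the ethdev Rx path needs real mbufs from rte_pktmbuf_pool_create(). A hedged reconstruction of the two calls; the element size, cache size and constructor arguments are stand-ins for the test's own values:

    #include <stdbool.h>
    #include <rte_mbuf.h>
    #include <rte_mempool.h>

    static struct rte_mempool *
    pool_sketch(const char *name, bool need_mbufs, unsigned int pool_sz,
                unsigned int elt_size, int socket_id)
    {
            if (!need_mbufs)        /* SYNT / EVENT_TIMER_ADPTR producers */
                    return rte_mempool_create(name,
                                    pool_sz,        /* number of elements */
                                    elt_size,       /* element size */
                                    512,            /* cache size */
                                    0, NULL, NULL, NULL, NULL,
                                    socket_id,
                                    0);             /* flags */

            /* ETH_RX_ADPTR producer: events must carry mbufs */
            return rte_pktmbuf_pool_create(name,
                            pool_sz,                /* number of elements */
                            512,                    /* cache size */
                            0,                      /* priv size */
                            RTE_MBUF_DEFAULT_BUF_SIZE,
                            socket_id);             /* socket id */
    }
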
788 perf_mempool_destroy(struct evt_test *test, struct evt_options *opt) in perf_mempool_destroy() argument
790 RTE_SET_USED(opt); in perf_mempool_destroy()
797 perf_test_setup(struct evt_test *test, struct evt_options *opt) in perf_test_setup() argument
802 RTE_CACHE_LINE_SIZE, opt->socket_id); in perf_test_setup()
811 if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_test_setup()
812 t->outstand_pkts = opt->nb_timers * in perf_test_setup()
813 evt_nr_active_lcores(opt->plcores); in perf_test_setup()
814 t->nb_pkts = opt->nb_timers; in perf_test_setup()
816 t->outstand_pkts = opt->nb_pkts * in perf_test_setup()
817 evt_nr_active_lcores(opt->plcores); in perf_test_setup()
818 t->nb_pkts = opt->nb_pkts; in perf_test_setup()
821 t->nb_workers = evt_nr_active_lcores(opt->wlcores); in perf_test_setup()
823 t->nb_flows = opt->nb_flows; in perf_test_setup()
825 t->opt = opt; in perf_test_setup()
826 memcpy(t->sched_type_list, opt->sched_type_list, in perf_test_setup()
827 sizeof(opt->sched_type_list)); in perf_test_setup()
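
outstand_pkts is the total the main lcore waits for: the per-producer count times the number of active producer lcores, with nb_timers taking the place of nb_pkts for timer-adapter producers. For example, nb_pkts = 1,000,000 with two producer lcores means the test runs until 2,000,000 events have been processed.
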
834 perf_test_destroy(struct evt_test *test, struct evt_options *opt) in perf_test_destroy() argument
836 RTE_SET_USED(opt); in perf_test_destroy()