Lines matching refs:opt — cross-reference hits for struct evt_options usage in DPDK's app/test-eventdev perf test code (test_perf_common.c). Each hit shows the source line number, the matching line, and the enclosing function.

12 perf_test_result(struct evt_test *test, struct evt_options *opt)  in perf_test_result()  argument
14 RTE_SET_USED(opt); in perf_test_result()
38 struct evt_options *opt = t->opt; in perf_producer() local
49 if (opt->verbose_level > 1) in perf_producer()
56 ev.sched_type = t->opt->sched_type_list[0]; in perf_producer()
90 struct evt_options *opt = t->opt; in perf_producer_burst() local
101 uint32_t burst_size = opt->prod_enq_burst_sz; in perf_producer_burst()
108 if (opt->verbose_level > 1) in perf_producer_burst()
115 ev[i].sched_type = t->opt->sched_type_list[0]; in perf_producer_burst()
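A minimal sketch (not the app's code) of how a synthetic producer typically consumes the options shown above: the first entry of opt->sched_type_list seeds the injected events, opt->verbose_level gates extra logging, and opt->prod_enq_burst_sz selects the burst variant. The port and queue ids below are illustrative placeholders.

#include <rte_eventdev.h>
#include <rte_pause.h>

static inline void
inject_one_sketch(uint8_t dev_id, uint8_t ev_port, uint8_t queue_id,
		  uint8_t sched_type, void *payload)
{
	struct rte_event ev = {
		.op = RTE_EVENT_OP_NEW,
		.queue_id = queue_id,
		.sched_type = sched_type,	/* opt->sched_type_list[0] */
		.event_ptr = payload,
	};

	/* Retry until the event port accepts the event. */
	while (rte_event_enqueue_burst(dev_id, ev_port, &ev, 1) != 1)
		rte_pause();
}
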
153 struct evt_options *opt = t->opt; in perf_event_timer_producer() local
157 const uint8_t nb_timer_adptrs = opt->nb_timer_adptrs; in perf_event_timer_producer()
159 const uint64_t nb_timers = opt->nb_timers; in perf_event_timer_producer()
164 uint64_t timeout_ticks = opt->expiry_nsec / opt->timer_tick_nsec; in perf_event_timer_producer()
168 opt->optm_timer_tick_nsec in perf_event_timer_producer()
169 ? ceil((double)(timeout_ticks * opt->timer_tick_nsec) / in perf_event_timer_producer()
170 opt->optm_timer_tick_nsec) in perf_event_timer_producer()
175 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer()
181 if (opt->verbose_level > 1) in perf_event_timer_producer()
219 struct evt_options *opt = t->opt; in perf_event_timer_producer_burst() local
223 const uint8_t nb_timer_adptrs = opt->nb_timer_adptrs; in perf_event_timer_producer_burst()
225 const uint64_t nb_timers = opt->nb_timers; in perf_event_timer_producer_burst()
230 uint64_t timeout_ticks = opt->expiry_nsec / opt->timer_tick_nsec; in perf_event_timer_producer_burst()
234 opt->optm_timer_tick_nsec in perf_event_timer_producer_burst()
235 ? ceil((double)(timeout_ticks * opt->timer_tick_nsec) / in perf_event_timer_producer_burst()
236 opt->optm_timer_tick_nsec) in perf_event_timer_producer_burst()
241 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer_burst()
247 if (opt->verbose_level > 1) in perf_event_timer_producer_burst()
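Both timer producers above derive their timeout in ticks the same way; the arithmetic below is a self-contained sketch of that expression, with a stand-in struct for the three evt_options fields involved.

#include <math.h>
#include <stdint.h>

struct timer_opts_sketch {
	uint64_t expiry_nsec;		/* requested expiry, ns */
	uint64_t timer_tick_nsec;	/* requested tick, ns */
	uint64_t optm_timer_tick_nsec;	/* tick actually granted by the adapter, ns */
};

static uint64_t
timeout_ticks_sketch(const struct timer_opts_sketch *o)
{
	uint64_t ticks = o->expiry_nsec / o->timer_tick_nsec;

	/* If the adapter reported a coarser/finer resolution, rescale the
	 * requested timeout to that resolution, rounding up. */
	if (o->optm_timer_tick_nsec)
		ticks = ceil((double)(ticks * o->timer_tick_nsec) /
			     o->optm_timer_tick_nsec);
	return ticks;
}
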
286 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_new() local
295 if (opt->verbose_level > 1) in crypto_adapter_enq_op_new()
300 len = opt->mbuf_sz ? opt->mbuf_sz : RTE_ETHER_MIN_LEN; in crypto_adapter_enq_op_new()
335 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_fwd() local
344 if (opt->verbose_level > 1) in crypto_adapter_enq_op_fwd()
354 len = opt->mbuf_sz ? opt->mbuf_sz : RTE_ETHER_MIN_LEN; in crypto_adapter_enq_op_fwd()
384 struct evt_options *opt = p->t->opt; in perf_event_crypto_producer() local
386 if (opt->crypto_adptr_mode == RTE_EVENT_CRYPTO_ADAPTER_OP_NEW) in perf_event_crypto_producer()
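The crypto producer hits above reduce to two option-driven choices, sketched here with only the public RTE_* symbols; everything else is illustrative: the enqueue path follows opt->crypto_adptr_mode, and the mbuf data length falls back to RTE_ETHER_MIN_LEN when --mbuf_sz was not given.

#include <rte_ether.h>
#include <rte_event_crypto_adapter.h>

static uint16_t
crypto_data_len_sketch(uint16_t mbuf_sz)
{
	return mbuf_sz ? mbuf_sz : RTE_ETHER_MIN_LEN;
}

static int
crypto_uses_new_mode_sketch(enum rte_event_crypto_adapter_mode mode)
{
	/* OP_NEW enqueues directly to the cryptodev; otherwise OP_FORWARD. */
	return mode == RTE_EVENT_CRYPTO_ADAPTER_OP_NEW;
}
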
404 if (t->opt->prod_type == EVT_PROD_TYPE_SYNT && in perf_producer_wrapper()
405 t->opt->prod_enq_burst_sz == 1) in perf_producer_wrapper()
407 else if (t->opt->prod_type == EVT_PROD_TYPE_SYNT && in perf_producer_wrapper()
408 t->opt->prod_enq_burst_sz > 1) { in perf_producer_wrapper()
414 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
415 !t->opt->timdev_use_burst) in perf_producer_wrapper()
417 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
418 t->opt->timdev_use_burst) in perf_producer_wrapper()
420 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) in perf_producer_wrapper()
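The perf_producer_wrapper() hits above select a producer body from opt->prod_type plus the burst settings. The self-contained sketch below mirrors that selection; the enum and returned labels are stand-ins, while the real wrapper calls the matching perf_*producer*() routine directly.

#include <stdint.h>

enum prod_type_sketch { PROD_SYNT, PROD_TIMER_ADPTR, PROD_CRYPTO_ADPTR };

static const char *
pick_producer_sketch(enum prod_type_sketch type, uint32_t enq_burst_sz,
		     int timdev_use_burst)
{
	if (type == PROD_SYNT && enq_burst_sz == 1)
		return "perf_producer";
	if (type == PROD_SYNT && enq_burst_sz > 1)
		return "perf_producer_burst";
	if (type == PROD_TIMER_ADPTR)
		return timdev_use_burst ? "perf_event_timer_producer_burst"
					: "perf_event_timer_producer";
	if (type == PROD_CRYPTO_ADPTR)
		return "perf_event_crypto_producer";
	return "none";
}
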
451 perf_launch_lcores(struct evt_test *test, struct evt_options *opt, in perf_launch_lcores() argument
460 if (!(opt->wlcores[lcore_id])) in perf_launch_lcores()
474 if (!(opt->plcores[lcore_id])) in perf_launch_lcores()
516 if (opt->fwd_latency && pkts > 0) { in perf_launch_lcores()
528 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_launch_lcores()
529 opt->prod_type == in perf_launch_lcores()
531 opt->prod_type == in perf_launch_lcores()
540 (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_launch_lcores()
541 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR || in perf_launch_lcores()
542 opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR)) { in perf_launch_lcores()
545 rte_event_dev_dump(opt->dev_id, stdout); in perf_launch_lcores()
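The perf_launch_lcores() hits above skip lcores that are not enabled in the worker/producer masks before launching anything on them. A sketch of that pattern, assuming wlcores[]/plcores[] are per-lcore enable flags as in evt_options; the worker and producer bodies are placeholders supplied by the caller.

#include <rte_launch.h>
#include <rte_lcore.h>

static void
launch_sketch(const uint8_t *wlcores, const uint8_t *plcores,
	      int (*worker)(void *), int (*producer)(void *), void *arg)
{
	unsigned int lcore_id;

	/* Workers first... */
	RTE_LCORE_FOREACH_WORKER(lcore_id) {
		if (!wlcores[lcore_id])
			continue;
		rte_eal_remote_launch(worker, arg, lcore_id);
	}

	/* ...then producers, each on its own enabled lcore. */
	RTE_LCORE_FOREACH_WORKER(lcore_id) {
		if (!plcores[lcore_id])
			continue;
		rte_eal_remote_launch(producer, arg, lcore_id);
	}
}
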
559 perf_event_rx_adapter_setup(struct evt_options *opt, uint8_t stride, in perf_event_rx_adapter_setup() argument
568 queue_conf.ev.sched_type = opt->sched_type_list[0]; in perf_event_rx_adapter_setup()
572 ret = rte_event_eth_rx_adapter_caps_get(opt->dev_id, in perf_event_rx_adapter_setup()
577 opt->dev_id); in perf_event_rx_adapter_setup()
581 ret = rte_event_eth_rx_adapter_create(prod, opt->dev_id, in perf_event_rx_adapter_setup()
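A sketch of the Rx adapter wiring visible in perf_event_rx_adapter_setup(): query the caps, create the adapter against opt->dev_id, then add Rx queues with the first configured sched type. The adapter id, error handling, and the all-queues shortcut are simplifications, not the app's exact flow.

#include <rte_event_eth_rx_adapter.h>

static int
rx_adapter_sketch(uint8_t adptr_id, uint8_t dev_id, uint16_t eth_port,
		  uint8_t sched_type, struct rte_event_port_conf *pconf)
{
	struct rte_event_eth_rx_adapter_queue_conf qconf = {0};
	uint32_t caps;
	int ret;

	ret = rte_event_eth_rx_adapter_caps_get(dev_id, eth_port, &caps);
	if (ret)
		return ret;

	ret = rte_event_eth_rx_adapter_create(adptr_id, dev_id, pconf);
	if (ret)
		return ret;

	qconf.ev.sched_type = sched_type;	/* opt->sched_type_list[0] */
	/* rx_queue_id == -1 adds every Rx queue of the port in one call. */
	return rte_event_eth_rx_adapter_queue_add(adptr_id, eth_port, -1, &qconf);
}
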
618 uint8_t nb_producers = evt_nr_active_lcores(t->opt->plcores); in perf_event_timer_adapter_setup()
624 for (i = 0; i < t->opt->nb_timer_adptrs; i++) { in perf_event_timer_adapter_setup()
626 .event_dev_id = t->opt->dev_id, in perf_event_timer_adapter_setup()
628 .timer_tick_ns = t->opt->timer_tick_nsec, in perf_event_timer_adapter_setup()
629 .max_tmo_ns = t->opt->max_tmo_nsec, in perf_event_timer_adapter_setup()
630 .nb_timers = t->opt->pool_sz, in perf_event_timer_adapter_setup()
643 t->opt->optm_timer_tick_nsec = adapter_info.min_resolution_ns; in perf_event_timer_adapter_setup()
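A sketch of the timer adapter creation in perf_event_timer_adapter_setup(): the conf fields come straight from the options shown, and the min_resolution_ns the adapter reports back is what the app stores in opt->optm_timer_tick_nsec for the producers' tick rescaling. The clock source choice and error handling are simplified.

#include <rte_event_timer_adapter.h>

static int
timer_adapter_sketch(uint8_t dev_id, uint16_t adptr_id, uint32_t socket_id,
		     uint64_t tick_ns, uint64_t max_tmo_ns, uint64_t nb_timers,
		     uint64_t *optm_tick_ns)
{
	struct rte_event_timer_adapter_info info;
	struct rte_event_timer_adapter *adptr;
	struct rte_event_timer_adapter_conf conf = {
		.event_dev_id = dev_id,
		.timer_adapter_id = adptr_id,
		.socket_id = socket_id,
		.clk_src = RTE_EVENT_TIMER_ADAPTER_CPU_CLK,
		.timer_tick_ns = tick_ns,	/* opt->timer_tick_nsec */
		.max_tmo_ns = max_tmo_ns,	/* opt->max_tmo_nsec */
		.nb_timers = nb_timers,		/* opt->pool_sz */
	};

	adptr = rte_event_timer_adapter_create(&conf);
	if (adptr == NULL)
		return -1;

	/* Remember the granted resolution for the tick rescaling above. */
	rte_event_timer_adapter_get_info(adptr, &info);
	*optm_tick_ns = info.min_resolution_ns;	/* opt->optm_timer_tick_nsec */

	return rte_event_timer_adapter_start(adptr);
}
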
667 struct evt_options *opt = t->opt; in perf_event_crypto_adapter_setup() local
677 if (((opt->crypto_adptr_mode == RTE_EVENT_CRYPTO_ADAPTER_OP_NEW) && in perf_event_crypto_adapter_setup()
679 ((opt->crypto_adptr_mode == RTE_EVENT_CRYPTO_ADAPTER_OP_FORWARD) && in perf_event_crypto_adapter_setup()
682 opt->crypto_adptr_mode ? "OP_FORWARD" : "OP_NEW"); in perf_event_crypto_adapter_setup()
733 perf_event_dev_port_setup(struct evt_test *test, struct evt_options *opt, in perf_event_dev_port_setup() argument
742 for (port = 0; port < evt_nr_active_lcores(opt->wlcores); in perf_event_dev_port_setup()
746 w->dev_id = opt->dev_id; in perf_event_dev_port_setup()
755 ret = rte_event_port_setup(opt->dev_id, port, &conf); in perf_event_dev_port_setup()
761 ret = rte_event_port_link(opt->dev_id, port, NULL, NULL, 0); in perf_event_dev_port_setup()
769 if (opt->prod_type == EVT_PROD_TYPE_ETH_RX_ADPTR) { in perf_event_dev_port_setup()
770 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
778 ret = perf_event_rx_adapter_setup(opt, stride, conf); in perf_event_dev_port_setup()
781 } else if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_event_dev_port_setup()
783 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
793 } else if (opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) { in perf_event_dev_port_setup()
799 opt->dev_id, &conf, 0); in perf_event_dev_port_setup()
806 for (; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
817 p->dev_id = opt->dev_id; in perf_event_dev_port_setup()
824 RTE_CACHE_LINE_SIZE, opt->socket_id); in perf_event_dev_port_setup()
847 ret = rte_event_port_setup(opt->dev_id, port, &conf); in perf_event_dev_port_setup()
862 for ( ; port < perf_nb_event_ports(opt); port++) { in perf_event_dev_port_setup()
865 p->dev_id = opt->dev_id; in perf_event_dev_port_setup()
875 ret = rte_event_port_setup(opt->dev_id, port, &conf); in perf_event_dev_port_setup()
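The worker-port hits in perf_event_dev_port_setup() follow the standard eventdev pattern: each worker lcore gets its own port on opt->dev_id and is linked to all queues. A minimal sketch of that loop; producer and adapter ports follow in the real code with their own configuration.

#include <rte_eventdev.h>

static int
worker_ports_sketch(uint8_t dev_id, uint8_t nb_workers,
		    const struct rte_event_port_conf *conf)
{
	uint8_t port;
	int ret;

	for (port = 0; port < nb_workers; port++) {
		ret = rte_event_port_setup(dev_id, port, conf);
		if (ret)
			return ret;

		/* NULL queue list + 0 count links the port to every queue. */
		ret = rte_event_port_link(dev_id, port, NULL, NULL, 0);
		if (ret < 0)
			return ret;
	}
	return 0;
}
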
888 perf_opt_check(struct evt_options *opt, uint64_t nb_queues) in perf_opt_check() argument
895 lcores = opt->prod_type == EVT_PROD_TYPE_SYNT ? 3 : 2; in perf_opt_check()
903 if (evt_lcores_has_overlap(opt->wlcores, rte_get_main_lcore())) { in perf_opt_check()
907 if (evt_lcores_has_overlap_multi(opt->wlcores, opt->plcores)) { in perf_opt_check()
911 if (evt_has_disabled_lcore(opt->wlcores)) { in perf_opt_check()
915 if (!evt_has_active_lcore(opt->wlcores)) { in perf_opt_check()
920 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_opt_check()
921 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR || in perf_opt_check()
922 opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) { in perf_opt_check()
924 if (evt_lcores_has_overlap(opt->plcores, in perf_opt_check()
929 if (evt_has_disabled_lcore(opt->plcores)) { in perf_opt_check()
933 if (!evt_has_active_lcore(opt->plcores)) { in perf_opt_check()
939 if (evt_has_invalid_stage(opt)) in perf_opt_check()
942 if (evt_has_invalid_sched_type(opt)) in perf_opt_check()
949 if (perf_nb_event_ports(opt) > EVT_MAX_PORTS) { in perf_opt_check()
955 if ((opt->nb_stages == 1 && in perf_opt_check()
956 opt->prod_type != EVT_PROD_TYPE_EVENT_TIMER_ADPTR) && in perf_opt_check()
957 opt->fwd_latency) { in perf_opt_check()
959 opt->fwd_latency = 0; in perf_opt_check()
962 if (opt->fwd_latency && !opt->q_priority) { in perf_opt_check()
964 opt->q_priority = 1; in perf_opt_check()
966 if (opt->nb_pkts == 0) in perf_opt_check()
967 opt->nb_pkts = INT64_MAX/evt_nr_active_lcores(opt->plcores); in perf_opt_check()
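Two of the option fix-ups above are worth spelling out; the sketch below simplifies them (the real check also keys on the producer type): a single-stage pipeline has nothing to forward through so fwd_latency is cleared, and an unlimited run (--nb_pkts 0) is capped so the per-producer totals cannot overflow the aggregate signed 64-bit counter. nr_producers stands in for evt_nr_active_lcores(opt->plcores).

#include <stdint.h>

static void
fixup_opts_sketch(uint64_t *nb_pkts, int *fwd_latency, uint8_t nb_stages,
		  unsigned int nr_producers)
{
	if (nb_stages == 1 && *fwd_latency)
		*fwd_latency = 0;	/* nothing to measure across stages */

	if (*nb_pkts == 0)
		*nb_pkts = INT64_MAX / nr_producers;
}
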
973 perf_opt_dump(struct evt_options *opt, uint8_t nb_queues) in perf_opt_dump() argument
975 evt_dump("nb_prod_lcores", "%d", evt_nr_active_lcores(opt->plcores)); in perf_opt_dump()
976 evt_dump_producer_lcores(opt); in perf_opt_dump()
977 evt_dump("nb_worker_lcores", "%d", evt_nr_active_lcores(opt->wlcores)); in perf_opt_dump()
978 evt_dump_worker_lcores(opt); in perf_opt_dump()
979 evt_dump_nb_stages(opt); in perf_opt_dump()
980 evt_dump("nb_evdev_ports", "%d", perf_nb_event_ports(opt)); in perf_opt_dump()
982 evt_dump_queue_priority(opt); in perf_opt_dump()
983 evt_dump_sched_type_list(opt); in perf_opt_dump()
984 evt_dump_producer_type(opt); in perf_opt_dump()
985 evt_dump("prod_enq_burst_sz", "%d", opt->prod_enq_burst_sz); in perf_opt_dump()
1014 perf_eventdev_destroy(struct evt_test *test, struct evt_options *opt) in perf_eventdev_destroy() argument
1019 if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_eventdev_destroy()
1020 for (i = 0; i < opt->nb_timer_adptrs; i++) in perf_eventdev_destroy()
1023 rte_event_dev_stop(opt->dev_id); in perf_eventdev_destroy()
1024 rte_event_dev_close(opt->dev_id); in perf_eventdev_destroy()
1037 perf_ethdev_setup(struct evt_test *test, struct evt_options *opt) in perf_ethdev_setup() argument
1055 if (opt->prod_type != EVT_PROD_TYPE_ETH_RX_ADPTR) in perf_ethdev_setup()
1116 perf_ethdev_rx_stop(struct evt_test *test, struct evt_options *opt) in perf_ethdev_rx_stop() argument
1121 if (opt->prod_type == EVT_PROD_TYPE_ETH_RX_ADPTR) { in perf_ethdev_rx_stop()
1131 perf_ethdev_destroy(struct evt_test *test, struct evt_options *opt) in perf_ethdev_destroy() argument
1136 if (opt->prod_type == EVT_PROD_TYPE_ETH_RX_ADPTR) { in perf_ethdev_destroy()
1147 perf_cryptodev_setup(struct evt_test *test, struct evt_options *opt) in perf_cryptodev_setup() argument
1155 if (opt->prod_type != EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) in perf_cryptodev_setup()
1165 "crypto_op_pool", RTE_CRYPTO_OP_TYPE_SYMMETRIC, opt->pool_sz, in perf_cryptodev_setup()
1172 nb_sessions = evt_nr_active_lcores(opt->plcores) * t->nb_flows; in perf_cryptodev_setup()
1208 nb_plcores = evt_nr_active_lcores(opt->plcores); in perf_cryptodev_setup()
1264 perf_cryptodev_destroy(struct evt_test *test, struct evt_options *opt) in perf_cryptodev_destroy() argument
1270 if (opt->prod_type != EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) in perf_cryptodev_destroy()
1273 for (port = t->nb_workers; port < perf_nb_event_ports(opt); port++) { in perf_cryptodev_destroy()
1303 perf_mempool_setup(struct evt_test *test, struct evt_options *opt) in perf_mempool_setup() argument
1307 if (opt->prod_type == EVT_PROD_TYPE_SYNT || in perf_mempool_setup()
1308 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_mempool_setup()
1310 opt->pool_sz, /* number of elements*/ in perf_mempool_setup()
1315 NULL, opt->socket_id, 0); /* flags */ in perf_mempool_setup()
1318 opt->pool_sz, /* number of elements*/ in perf_mempool_setup()
1322 opt->socket_id); /* flags */ in perf_mempool_setup()
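A sketch of the pool selection in perf_mempool_setup(): synthetic and timer-adapter producers only need fixed-size scratch elements, so a plain rte_mempool suffices, while the other producer types need real mbufs. The element size and cache size here are placeholders; note the last argument to rte_pktmbuf_pool_create() is the socket id.

#include <rte_mempool.h>
#include <rte_mbuf.h>

static struct rte_mempool *
perf_pool_sketch(const char *name, int needs_mbufs, unsigned int pool_sz,
		 unsigned int elt_sz, int socket_id)
{
	if (!needs_mbufs)	/* EVT_PROD_TYPE_SYNT or EVENT_TIMER_ADPTR */
		return rte_mempool_create(name, pool_sz, elt_sz,
					  512 /* cache */, 0 /* priv */,
					  NULL, NULL, NULL, NULL,
					  socket_id, 0 /* flags */);

	return rte_pktmbuf_pool_create(name, pool_sz, 512 /* cache */,
				       0 /* priv */,
				       RTE_MBUF_DEFAULT_BUF_SIZE, socket_id);
}
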
1335 perf_mempool_destroy(struct evt_test *test, struct evt_options *opt) in perf_mempool_destroy() argument
1337 RTE_SET_USED(opt); in perf_mempool_destroy()
1344 perf_test_setup(struct evt_test *test, struct evt_options *opt) in perf_test_setup() argument
1349 RTE_CACHE_LINE_SIZE, opt->socket_id); in perf_test_setup()
1358 if (opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR) { in perf_test_setup()
1359 t->outstand_pkts = opt->nb_timers * in perf_test_setup()
1360 evt_nr_active_lcores(opt->plcores); in perf_test_setup()
1361 t->nb_pkts = opt->nb_timers; in perf_test_setup()
1363 t->outstand_pkts = opt->nb_pkts * in perf_test_setup()
1364 evt_nr_active_lcores(opt->plcores); in perf_test_setup()
1365 t->nb_pkts = opt->nb_pkts; in perf_test_setup()
1368 t->nb_workers = evt_nr_active_lcores(opt->wlcores); in perf_test_setup()
1370 t->nb_flows = opt->nb_flows; in perf_test_setup()
1372 t->opt = opt; in perf_test_setup()
1373 memcpy(t->sched_type_list, opt->sched_type_list, in perf_test_setup()
1374 sizeof(opt->sched_type_list)); in perf_test_setup()
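The perf_test_setup() hits above compute the expected total ("outstanding") packet count as the per-producer count times the number of producer lcores, taken from opt->nb_timers for the timer-adapter producer and from opt->nb_pkts otherwise. A one-function sketch of that bookkeeping:

#include <stdint.h>

static uint64_t
expected_total_sketch(int timer_producer, uint64_t nb_timers,
		      uint64_t nb_pkts, unsigned int nr_producers)
{
	uint64_t per_producer = timer_producer ? nb_timers : nb_pkts;

	return per_producer * nr_producers;
}
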
1381 perf_test_destroy(struct evt_test *test, struct evt_options *opt) in perf_test_destroy() argument
1383 RTE_SET_USED(opt); in perf_test_destroy()