Lines matching refs:t — references to the variable t (a struct test_perf pointer), listed by source line and enclosing function; "local" marks a local declaration, "argument" a function parameter.
17 struct test_perf *t = evt_test_priv(test); in perf_test_result() local
20 for (i = 0; i < t->nb_workers; i++) in perf_test_result()
21 total += t->worker[i].processed_pkts; in perf_test_result()
22 for (i = 0; i < t->nb_workers; i++) in perf_test_result()
25 t->worker[i].processed_pkts, in perf_test_result()
26 (((double)t->worker[i].processed_pkts)/total) in perf_test_result()
29 return t->result; in perf_test_result()
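The perf_test_result() references above (lines 17-29) trace the result computation: per-worker packet counts are summed, then each worker's count and share of the total are printed. A minimal, self-contained sketch of that aggregation pattern, using a deliberately simplified stand-in for the worker array (the real struct test_perf carries far more state):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    struct worker_data { uint64_t processed_pkts; };    /* simplified stand-in */

    static void
    print_worker_shares(const struct worker_data *worker, unsigned int nb_workers)
    {
            uint64_t total = 0;
            unsigned int i;

            /* first pass: total packets across all workers */
            for (i = 0; i < nb_workers; i++)
                    total += worker[i].processed_pkts;
            /* second pass: each worker's count and its share of the total */
            for (i = 0; i < nb_workers; i++)
                    printf("worker %u: %" PRIu64 " pkts (%.2f%%)\n", i,
                           worker[i].processed_pkts,
                           total ? (double)worker[i].processed_pkts / total * 100.0 : 0.0);
    }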
37 struct test_perf *t = p->t; in perf_producer() local
38 struct evt_options *opt = t->opt; in perf_producer()
41 struct rte_mempool *pool = t->pool; in perf_producer()
42 const uint64_t nb_pkts = t->nb_pkts; in perf_producer()
43 const uint32_t nb_flows = t->nb_flows; in perf_producer()
56 ev.sched_type = t->opt->sched_type_list[0]; in perf_producer()
61 while (count < nb_pkts && t->done == false) { in perf_producer()
70 if (t->done) in perf_producer()
89 struct test_perf *t = p->t; in perf_producer_burst() local
90 struct evt_options *opt = t->opt; in perf_producer_burst()
93 struct rte_mempool *pool = t->pool; in perf_producer_burst()
94 const uint64_t nb_pkts = t->nb_pkts; in perf_producer_burst()
95 const uint32_t nb_flows = t->nb_flows; in perf_producer_burst()
115 ev[i].sched_type = t->opt->sched_type_list[0]; in perf_producer_burst()
121 while (count < nb_pkts && t->done == false) { in perf_producer_burst()
135 if (t->done) in perf_producer_burst()
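Both synthetic producers (perf_producer and perf_producer_burst, lines 37-135) share one loop shape: keep enqueueing until nb_pkts events have been produced or the shared t->done flag is raised by the launching core, re-checking the flag around blocking enqueues. A rough sketch of that shape; the flow-id round robin and the C11 atomic flag are illustrative assumptions, not the test's actual synchronization:

    #include <stdatomic.h>
    #include <stdint.h>

    static atomic_bool done;                    /* raised by the controlling core */

    static void
    produce(uint64_t nb_pkts, uint32_t nb_flows)
    {
            uint64_t count = 0;
            uint32_t flow = 0;

            while (count < nb_pkts && !atomic_load(&done)) {
                    flow = (flow + 1) % nb_flows;   /* pick the next flow id */
                    /* ... build an event for this flow and enqueue it, retrying
                     * only while the done flag stays clear (omitted) ... */
                    count++;
            }
    }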
152 struct test_perf *t = p->t; in perf_event_timer_producer() local
153 struct evt_options *opt = t->opt; in perf_event_timer_producer()
158 const uint32_t nb_flows = t->nb_flows; in perf_event_timer_producer()
160 struct rte_mempool *pool = t->pool; in perf_event_timer_producer()
162 struct rte_event_timer_adapter **adptr = t->timer_adptr; in perf_event_timer_producer()
175 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer()
184 while (count < nb_timers && t->done == false) { in perf_event_timer_producer()
196 if (t->done) in perf_event_timer_producer()
218 struct test_perf *t = p->t; in perf_event_timer_producer_burst() local
219 struct evt_options *opt = t->opt; in perf_event_timer_producer_burst()
224 const uint32_t nb_flows = t->nb_flows; in perf_event_timer_producer_burst()
226 struct rte_mempool *pool = t->pool; in perf_event_timer_producer_burst()
228 struct rte_event_timer_adapter **adptr = t->timer_adptr; in perf_event_timer_producer_burst()
241 tim.ev.sched_type = t->opt->sched_type_list[0]; in perf_event_timer_producer_burst()
250 while (count < nb_timers && t->done == false) { in perf_event_timer_producer_burst()
281 struct test_perf *t = p->t; in crypto_adapter_enq_op_new() local
282 const uint32_t nb_flows = t->nb_flows; in crypto_adapter_enq_op_new()
283 const uint64_t nb_pkts = t->nb_pkts; in crypto_adapter_enq_op_new()
284 struct rte_mempool *pool = t->pool; in crypto_adapter_enq_op_new()
286 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_new()
302 while (count < nb_pkts && t->done == false) { in crypto_adapter_enq_op_new()
308 op = rte_crypto_op_alloc(t->ca_op_pool, in crypto_adapter_enq_op_new()
318 t->done == false) in crypto_adapter_enq_op_new()
331 struct test_perf *t = p->t; in crypto_adapter_enq_op_fwd() local
332 const uint32_t nb_flows = t->nb_flows; in crypto_adapter_enq_op_fwd()
333 const uint64_t nb_pkts = t->nb_pkts; in crypto_adapter_enq_op_fwd()
334 struct rte_mempool *pool = t->pool; in crypto_adapter_enq_op_fwd()
335 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_fwd()
356 while (count < nb_pkts && t->done == false) { in crypto_adapter_enq_op_fwd()
362 op = rte_crypto_op_alloc(t->ca_op_pool, in crypto_adapter_enq_op_fwd()
373 t->done == false) in crypto_adapter_enq_op_fwd()
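The two crypto-adapter enqueue paths (lines 281-373) follow the same producer pattern but pull a crypto op from t->ca_op_pool on every iteration. A hedged sketch of that allocation loop: rte_crypto_op_alloc() and RTE_CRYPTO_OP_TYPE_SYMMETRIC are real DPDK symbols, everything else here is a simplified assumption:

    #include <stdbool.h>
    #include <stdint.h>
    #include <rte_crypto.h>
    #include <rte_mempool.h>

    static void
    crypto_produce(struct rte_mempool *ca_op_pool, uint64_t nb_pkts, volatile bool *done)
    {
            uint64_t count = 0;

            while (count < nb_pkts && !*done) {
                    struct rte_crypto_op *op =
                            rte_crypto_op_alloc(ca_op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);

                    if (op == NULL)
                            continue;   /* pool exhausted; retry while not done */
                    /* ... fill the symmetric op, attach an mbuf, enqueue (omitted) ... */
                    count++;
            }
    }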
384 struct evt_options *opt = p->t->opt; in perf_event_crypto_producer()
398 struct test_perf *t = p->t; in perf_producer_wrapper() local
404 if (t->opt->prod_type == EVT_PROD_TYPE_SYNT && in perf_producer_wrapper()
405 t->opt->prod_enq_burst_sz == 1) in perf_producer_wrapper()
407 else if (t->opt->prod_type == EVT_PROD_TYPE_SYNT && in perf_producer_wrapper()
408 t->opt->prod_enq_burst_sz > 1) { in perf_producer_wrapper()
414 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
415 !t->opt->timdev_use_burst) in perf_producer_wrapper()
417 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR && in perf_producer_wrapper()
418 t->opt->timdev_use_burst) in perf_producer_wrapper()
420 else if (t->opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR) in perf_producer_wrapper()
426 processed_pkts(struct test_perf *t) in processed_pkts() argument
431 for (i = 0; i < t->nb_workers; i++) in processed_pkts()
432 total += t->worker[i].processed_pkts; in processed_pkts()
438 total_latency(struct test_perf *t) in total_latency() argument
443 for (i = 0; i < t->nb_workers; i++) in total_latency()
444 total += t->worker[i].latency; in total_latency()
455 struct test_perf *t = evt_test_priv(test); in perf_launch_lcores() local
464 &t->worker[port_idx], lcore_id); in perf_launch_lcores()
478 &t->prod[port_idx], lcore_id); in perf_launch_lcores()
486 const uint64_t total_pkts = t->outstand_pkts; in perf_launch_lcores()
500 int64_t remaining = t->outstand_pkts - processed_pkts(t); in perf_launch_lcores()
502 while (t->done == false) { in perf_launch_lcores()
506 const uint64_t latency = total_latency(t); in perf_launch_lcores()
507 const uint64_t pkts = processed_pkts(t); in perf_launch_lcores()
509 remaining = t->outstand_pkts - pkts; in perf_launch_lcores()
527 t->result = EVT_TEST_SUCCESS; in perf_launch_lcores()
533 t->done = true; in perf_launch_lcores()
543 remaining = t->outstand_pkts - processed_pkts(t); in perf_launch_lcores()
547 t->done = true; in perf_launch_lcores()
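processed_pkts(), total_latency() and the perf_launch_lcores() references (lines 426-547) together form the completion check: the launcher keeps summing per-worker counters, compares the total against outstand_pkts, and flips t->done once everything has been processed (or on the abort path). A condensed sketch of that control flow over trimmed-down types; the struct layout and the success constant are stand-ins, not the real definitions:

    #include <stdbool.h>
    #include <stdint.h>

    struct worker_data_sketch { uint64_t processed_pkts; uint64_t latency; };
    struct test_perf_sketch {
            struct worker_data_sketch *worker;
            unsigned int nb_workers;
            uint64_t outstand_pkts;
            volatile bool done;
            int result;
    };

    static uint64_t
    processed_pkts(const struct test_perf_sketch *t)
    {
            uint64_t total = 0;

            for (unsigned int i = 0; i < t->nb_workers; i++)
                    total += t->worker[i].processed_pkts;
            return total;
    }

    static uint64_t
    total_latency(const struct test_perf_sketch *t)
    {
            uint64_t total = 0;

            for (unsigned int i = 0; i < t->nb_workers; i++)
                    total += t->worker[i].latency;
            return total;
    }

    static void
    wait_for_completion(struct test_perf_sketch *t)
    {
            int64_t remaining = t->outstand_pkts - processed_pkts(t);

            while (!t->done) {
                    const uint64_t pkts = processed_pkts(t);
                    const uint64_t latency = total_latency(t);

                    (void)latency;              /* printed periodically in the real test */
                    remaining = t->outstand_pkts - pkts;
                    if (remaining <= 0) {       /* every expected packet accounted for */
                            t->result = 0;      /* stands in for EVT_TEST_SUCCESS */
                            t->done = true;
                    }
            }
    }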
612 perf_event_timer_adapter_setup(struct test_perf *t) in perf_event_timer_adapter_setup() argument
618 uint8_t nb_producers = evt_nr_active_lcores(t->opt->plcores); in perf_event_timer_adapter_setup()
624 for (i = 0; i < t->opt->nb_timer_adptrs; i++) { in perf_event_timer_adapter_setup()
626 .event_dev_id = t->opt->dev_id, in perf_event_timer_adapter_setup()
628 .timer_tick_ns = t->opt->timer_tick_nsec, in perf_event_timer_adapter_setup()
629 .max_tmo_ns = t->opt->max_tmo_nsec, in perf_event_timer_adapter_setup()
630 .nb_timers = t->opt->pool_sz, in perf_event_timer_adapter_setup()
643 t->opt->optm_timer_tick_nsec = adapter_info.min_resolution_ns; in perf_event_timer_adapter_setup()
659 t->timer_adptr[i] = wl; in perf_event_timer_adapter_setup()
665 perf_event_crypto_adapter_setup(struct test_perf *t, struct prod_data *p) in perf_event_crypto_adapter_setup() argument
667 struct evt_options *opt = t->opt; in perf_event_crypto_adapter_setup()
707 cryptodev_sym_sess_create(struct prod_data *p, struct test_perf *t) in cryptodev_sym_sess_create() argument
717 sess = rte_cryptodev_sym_session_create(t->ca_sess_pool); in cryptodev_sym_sess_create()
724 t->ca_sess_priv_pool)) { in cryptodev_sym_sess_create()
737 struct test_perf *t = evt_test_priv(test); in perf_event_dev_port_setup() local
744 struct worker_data *w = &t->worker[port]; in perf_event_dev_port_setup()
748 w->t = t; in perf_event_dev_port_setup()
771 struct prod_data *p = &t->prod[port]; in perf_event_dev_port_setup()
772 p->t = t; in perf_event_dev_port_setup()
784 struct prod_data *p = &t->prod[port]; in perf_event_dev_port_setup()
786 p->t = t; in perf_event_dev_port_setup()
790 ret = perf_event_timer_adapter_setup(t); in perf_event_dev_port_setup()
809 struct prod_data *p = &t->prod[port]; in perf_event_dev_port_setup()
823 NULL, sizeof(crypto_sess) * t->nb_flows, in perf_event_dev_port_setup()
825 p->t = t; in perf_event_dev_port_setup()
832 for (flow_id = 0; flow_id < t->nb_flows; flow_id++) { in perf_event_dev_port_setup()
833 crypto_sess = cryptodev_sym_sess_create(p, t); in perf_event_dev_port_setup()
853 ret = perf_event_crypto_adapter_setup(t, p); in perf_event_dev_port_setup()
863 struct prod_data *p = &t->prod[port]; in perf_event_dev_port_setup()
868 p->t = t; in perf_event_dev_port_setup()
1017 struct test_perf *t = evt_test_priv(test); in perf_eventdev_destroy() local
1021 rte_event_timer_adapter_stop(t->timer_adptr[i]); in perf_eventdev_destroy()
1041 struct test_perf *t = evt_test_priv(test); in perf_ethdev_setup() local
1091 rte_socket_id(), NULL, t->pool) < 0) { in perf_ethdev_setup()
1150 struct test_perf *t = evt_test_priv(test); in perf_cryptodev_setup() local
1164 t->ca_op_pool = rte_crypto_op_pool_create( in perf_cryptodev_setup()
1167 if (t->ca_op_pool == NULL) { in perf_cryptodev_setup()
1172 nb_sessions = evt_nr_active_lcores(opt->plcores) * t->nb_flows; in perf_cryptodev_setup()
1173 t->ca_sess_pool = rte_cryptodev_sym_session_pool_create( in perf_cryptodev_setup()
1176 if (t->ca_sess_pool == NULL) { in perf_cryptodev_setup()
1193 t->ca_sess_priv_pool = rte_mempool_create( in perf_cryptodev_setup()
1196 if (t->ca_sess_priv_pool == NULL) { in perf_cryptodev_setup()
1236 qp_conf.mp_session = t->ca_sess_pool; in perf_cryptodev_setup()
1237 qp_conf.mp_session_private = t->ca_sess_priv_pool; in perf_cryptodev_setup()
1256 rte_mempool_free(t->ca_op_pool); in perf_cryptodev_setup()
1257 rte_mempool_free(t->ca_sess_pool); in perf_cryptodev_setup()
1258 rte_mempool_free(t->ca_sess_priv_pool); in perf_cryptodev_setup()
1267 struct test_perf *t = evt_test_priv(test); in perf_cryptodev_destroy() local
1273 for (port = t->nb_workers; port < perf_nb_event_ports(opt); port++) { in perf_cryptodev_destroy()
1275 struct prod_data *p = &t->prod[port]; in perf_cryptodev_destroy()
1279 for (flow_id = 0; flow_id < t->nb_flows; flow_id++) { in perf_cryptodev_destroy()
1297 rte_mempool_free(t->ca_op_pool); in perf_cryptodev_destroy()
1298 rte_mempool_free(t->ca_sess_pool); in perf_cryptodev_destroy()
1299 rte_mempool_free(t->ca_sess_priv_pool); in perf_cryptodev_destroy()
1305 struct test_perf *t = evt_test_priv(test); in perf_mempool_setup() local
1309 t->pool = rte_mempool_create(test->name, /* mempool name */ in perf_mempool_setup()
1317 t->pool = rte_pktmbuf_pool_create(test->name, /* mempool name */ in perf_mempool_setup()
1326 if (t->pool == NULL) { in perf_mempool_setup()
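perf_mempool_setup() (lines 1305-1326) creates t->pool either as a plain rte_mempool or as a pktmbuf pool under the test's name, depending on the producer type, and fails the setup when the pointer comes back NULL. A sketch of the pktmbuf branch; the name and sizing values are illustrative only, since the real test derives them from its options:

    #include <errno.h>
    #include <rte_lcore.h>
    #include <rte_mbuf.h>

    static int
    setup_pktmbuf_pool(struct rte_mempool **pool)
    {
            /* Illustrative sizing; the real test takes these from evt_options. */
            *pool = rte_pktmbuf_pool_create("perf_pool",    /* mempool name */
                            16384,                          /* number of mbufs */
                            512,                            /* per-lcore cache size */
                            0,                              /* private area size */
                            RTE_MBUF_DEFAULT_BUF_SIZE,      /* data room size */
                            rte_socket_id());

            return *pool == NULL ? -ENOMEM : 0;
    }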
1338 struct test_perf *t = evt_test_priv(test); in perf_mempool_destroy() local
1340 rte_mempool_free(t->pool); in perf_mempool_destroy()
1356 struct test_perf *t = evt_test_priv(test); in perf_test_setup() local
1359 t->outstand_pkts = opt->nb_timers * in perf_test_setup()
1361 t->nb_pkts = opt->nb_timers; in perf_test_setup()
1363 t->outstand_pkts = opt->nb_pkts * in perf_test_setup()
1365 t->nb_pkts = opt->nb_pkts; in perf_test_setup()
1368 t->nb_workers = evt_nr_active_lcores(opt->wlcores); in perf_test_setup()
1369 t->done = false; in perf_test_setup()
1370 t->nb_flows = opt->nb_flows; in perf_test_setup()
1371 t->result = EVT_TEST_FAILED; in perf_test_setup()
1372 t->opt = opt; in perf_test_setup()
1373 memcpy(t->sched_type_list, opt->sched_type_list, in perf_test_setup()