/dpdk/app/test-eventdev/
evt_options.c
    22 memset(opt, 0, sizeof(*opt)); in evt_options_default()
    24 opt->dev_id = 0; in evt_options_default()
    26 opt->nb_flows = 1024; in evt_options_default()
    30 opt->wkr_deq_dep = 16; in evt_options_default()
    32 opt->nb_timers = 1E8; in evt_options_default()
    38 opt->eth_queues = 1; in evt_options_default()
    39 opt->vector_size = 64; in evt_options_default()
    81 opt->fwd_latency = 1; in evt_parse_fwd_latency()
    88 opt->q_priority = 1; in evt_parse_queue_priority()
    287 opt->ena_vector = 1; in evt_parse_ena_vector()
    [all …]
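
The evt_options.c hits above all come from the option-defaults initializer and the per-flag parsers. As a reference, here is a minimal sketch of that defaults pattern in C, using a trimmed-down, hypothetical struct (evt_options_mini); the real struct evt_options lives in evt_options.h and has many more fields.

#include <stdint.h>
#include <string.h>

/* Hypothetical, trimmed-down options struct for illustration only. */
struct evt_options_mini {
    uint8_t  dev_id;
    uint32_t nb_flows;
    uint16_t wkr_deq_dep;
    uint64_t nb_timers;
    uint16_t vector_size;
    uint8_t  eth_queues;
};

static void
evt_options_mini_default(struct evt_options_mini *opt)
{
    memset(opt, 0, sizeof(*opt)); /* zero everything first */
    opt->dev_id = 0;              /* first event device */
    opt->nb_flows = 1024;         /* defaults match the hits above */
    opt->wkr_deq_dep = 16;
    opt->nb_timers = 1E8;         /* double constant, converts to 100000000 */
    opt->vector_size = 64;
    opt->eth_queues = 1;
}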
|
test_pipeline_common.c
    11 RTE_SET_USED(opt); in pipeline_test_result()
    41 if (opt->ena_vector) { in pipeline_opt_dump()
    339 unsigned int nb_elem = (opt->pool_sz / opt->vector_size) << 1; in pipeline_event_rx_adapter_setup()
    344 opt->socket_id); in pipeline_event_rx_adapter_setup()
    359 opt->dev_id); in pipeline_event_rx_adapter_setup()
    580 RTE_SET_USED(opt); in pipeline_ethdev_destroy()
    605 if (!opt->mbuf_sz) in pipeline_mempool_setup()
    641 0, opt->mbuf_sz, opt->socket_id); /* flags */ in pipeline_mempool_setup()
    655 0, opt->mbuf_sz, opt->socket_id); /* flags */ in pipeline_mempool_setup()
    672 RTE_SET_USED(opt); in pipeline_mempool_destroy()
    [all …]
|
test_order_common.c
    10 RTE_SET_USED(opt); in order_test_result()
    21 struct evt_options *opt = t->opt; in order_producer() local
    32 if (opt->verbose_level > 1) in order_producer()
    128 if (opt->nb_pkts == 0) in order_opt_check()
    129 opt->nb_pkts = INT64_MAX; in order_opt_check()
    192 t->nb_pkts = opt->nb_pkts; in order_test_setup()
    195 t->opt = opt; in order_test_setup()
    209 RTE_SET_USED(opt); in order_test_destroy()
    225 opt->socket_id); in order_mempool_setup()
    237 RTE_SET_USED(opt); in order_mempool_destroy()
    [all …]
|
test_perf_common.c
    14 RTE_SET_USED(opt); in perf_test_result()
    38 struct evt_options *opt = t->opt; in perf_producer() local
    90 struct evt_options *opt = t->opt; in perf_producer_burst() local
    153 struct evt_options *opt = t->opt; in perf_event_timer_producer() local
    219 struct evt_options *opt = t->opt; in perf_event_timer_producer_burst() local
    286 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_new() local
    335 struct evt_options *opt = t->opt; in crypto_adapter_enq_op_fwd() local
    384 struct evt_options *opt = p->t->opt; in perf_event_crypto_producer() local
    667 struct evt_options *opt = t->opt; in perf_event_crypto_adapter_setup() local
    962 if (opt->fwd_latency && !opt->q_priority) { in perf_opt_check()
    [all …]
|
evt_options.h
    134 if (!opt->nb_stages) { in evt_has_invalid_stage()
    138 if (opt->nb_stages > EVT_MAX_STAGES) { in evt_has_invalid_stage()
    154 opt->sched_type_list[i], i); in evt_has_invalid_sched_type()
    169 if (opt->wlcores[c]) in evt_dump_worker_lcores()
    182 if (opt->plcores[c]) in evt_dump_producer_lcores()
    238 for (i = 0; i < opt->nb_stages; i++) in evt_dump_sched_type_list()
    268 switch (opt->prod_type) { in evt_dump_producer_type()
    280 if (opt->timdev_use_burst) in evt_dump_producer_type()
    289 if (opt->optm_timer_tick_nsec) in evt_dump_producer_type()
    291 opt->optm_timer_tick_nsec); in evt_dump_producer_type()
    [all …]
|
evt_main.c
    16 struct evt_options opt; variable
    62 evt_options_default(&opt); in main()
    72 test = evt_test_get(opt.test_name); in main()
    89 if (test->ops.opt_check(&opt)) { in main()
    91 evt_options_dump_all(test, &opt); in main()
    100 evt_options_dump_all(test, &opt); in main()
    107 if (opt.verbose_level) in main()
    108 evt_options_dump_all(test, &opt); in main()
    168 test->ops.test_result(test, &opt); in main()
    180 test->ops.test_destroy(test, &opt); in main()
    [all …]
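
The main() hits trace the test lifecycle: install defaults, look up the named test, validate options, run, then collect results and tear down. A hedged sketch of that control flow follows; it assumes the test-eventdev declarations (evt_test.h, evt_options.h) are in scope and elides argument parsing, signal handling and the setup/launch steps that the truncated listing does not show.

/* Sketch of the control flow, not the actual evt_main.c code. */
static struct evt_options opt;

int
main(int argc, char **argv)
{
    struct evt_test *test;

    (void)argc;
    (void)argv; /* EAL and application argument parsing elided */

    evt_options_default(&opt);                 /* hit at line 62 */

    test = evt_test_get(opt.test_name);        /* hit at line 72 */
    if (test == NULL)
        return -1;

    if (test->ops.opt_check(&opt)) {           /* hit at line 89 */
        evt_options_dump_all(test, &opt);      /* dump the rejected options */
        return -1;
    }

    if (opt.verbose_level)                     /* hit at line 107 */
        evt_options_dump_all(test, &opt);

    /* ... setup, launch producer/worker lcores, wait for completion ... */

    test->ops.test_result(test, &opt);         /* hit at line 168 */
    test->ops.test_destroy(test, &opt);        /* hit at line 180 */
    return 0;
}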
|
test_perf_queue.c
    15 return nb_prod * opt->nb_stages; in perf_queue_nb_event_queues()
    173 struct evt_options *opt = w->t->opt; in worker_wrapper() local
    201 int nb_stages = opt->nb_stages; in perf_queue_eventdev_setup()
    212 evt_nr_active_lcores(opt->plcores); in perf_queue_eventdev_setup()
    231 .nb_atomic_flows = opt->nb_flows, in perf_queue_eventdev_setup()
    239 if (opt->q_priority) { in perf_queue_eventdev_setup()
    263 .dequeue_depth = opt->wkr_deq_dep, in perf_queue_eventdev_setup()
    336 evt_dump_fwd_latency(opt); in perf_queue_opt_dump()
    337 perf_opt_dump(opt, perf_queue_nb_event_queues(opt)); in perf_queue_opt_dump()
    343 return perf_opt_check(opt, perf_queue_nb_event_queues(opt)); in perf_queue_opt_check()
    [all …]
|
test_perf_atq.c
    10 atq_nb_event_queues(struct evt_options *opt) in atq_nb_event_queues() argument
    171 struct evt_options *opt = w->t->opt; in worker_wrapper() local
    174 const int fwd_latency = opt->fwd_latency; in worker_wrapper()
    209 evt_nr_active_lcores(opt->plcores); in perf_atq_eventdev_setup()
    211 nb_queues = atq_nb_event_queues(opt); in perf_atq_eventdev_setup()
    229 .nb_atomic_flows = opt->nb_flows, in perf_atq_eventdev_setup()
    246 .dequeue_depth = opt->wkr_deq_dep, in perf_atq_eventdev_setup()
    266 ret = rte_event_dev_start(opt->dev_id); in perf_atq_eventdev_setup()
    317 perf_atq_opt_dump(struct evt_options *opt) in perf_atq_opt_dump() argument
    319 perf_opt_dump(opt, atq_nb_event_queues(opt)); in perf_atq_opt_dump()
    [all …]
|
test_perf_common.h
    67 struct evt_options *opt; member
    92 struct evt_options *opt = t->opt;\
    96 opt->prod_type == EVT_PROD_TYPE_EVENT_TIMER_ADPTR;\
    98 opt->prod_type == EVT_PROD_TYPE_EVENT_CRYPTO_ADPTR;\
    101 const uint8_t nb_stages = t->opt->nb_stages;\
    106 if (opt->verbose_level > 1)\
    162 perf_nb_event_ports(struct evt_options *opt) in perf_nb_event_ports() argument
    164 return evt_nr_active_lcores(opt->wlcores) + in perf_nb_event_ports()
    165 evt_nr_active_lcores(opt->plcores); in perf_nb_event_ports()
    169 int perf_opt_check(struct evt_options *opt, uint64_t nb_queues);
    [all …]
|
test_order_queue.c
    146 .nb_atomic_flows = opt->nb_flows, in order_queue_eventdev_setup()
    147 .nb_atomic_order_sequences = opt->nb_flows, in order_queue_eventdev_setup()
    159 .nb_atomic_flows = opt->nb_flows, in order_queue_eventdev_setup()
    160 .nb_atomic_order_sequences = opt->nb_flows, in order_queue_eventdev_setup()
    173 if (!evt_has_distributed_sched(opt->dev_id)) { in order_queue_eventdev_setup()
    183 ret = rte_event_dev_start(opt->dev_id); in order_queue_eventdev_setup()
    193 order_queue_opt_dump(struct evt_options *opt) in order_queue_opt_dump() argument
    195 order_opt_dump(opt); in order_queue_opt_dump()
    200 order_queue_capability_check(struct evt_options *opt) in order_queue_capability_check() argument
    204 rte_event_dev_info_get(opt->dev_id, &dev_info); in order_queue_capability_check()
    [all …]
|
test_order_atq.c
    145 .nb_atomic_flows = opt->nb_flows, in order_atq_eventdev_setup()
    146 .nb_atomic_order_sequences = opt->nb_flows, in order_atq_eventdev_setup()
    159 if (!evt_has_distributed_sched(opt->dev_id)) { in order_atq_eventdev_setup()
    169 ret = rte_event_dev_start(opt->dev_id); in order_atq_eventdev_setup()
    171 evt_err("failed to start eventdev %d", opt->dev_id); in order_atq_eventdev_setup()
    179 order_atq_opt_dump(struct evt_options *opt) in order_atq_opt_dump() argument
    181 order_opt_dump(opt); in order_atq_opt_dump()
    186 order_atq_capability_check(struct evt_options *opt) in order_atq_capability_check() argument
    190 rte_event_dev_info_get(opt->dev_id, &dev_info); in order_atq_capability_check()
    192 order_nb_event_ports(opt)) { in order_atq_capability_check()
    [all …]
|
evt_test.h
    27 (struct evt_test *test, struct evt_options *opt);
    29 (struct evt_test *test, struct evt_options *opt);
    31 (struct evt_test *test, struct evt_options *opt);
    33 (struct evt_test *test, struct evt_options *opt);
    35 (struct evt_test *test, struct evt_options *opt);
    37 (struct evt_test *test, struct evt_options *opt);
    39 (struct evt_test *test, struct evt_options *opt);
    41 (struct evt_test *test, struct evt_options *opt);
    43 (struct evt_test *test, struct evt_options *opt);
    45 struct evt_options *opt);
    [all …]
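
The repeated "(struct evt_test *test, struct evt_options *opt);" fragments are the parameter lists of the per-test callbacks declared in evt_test.h. Below is a self-contained sketch of that function-pointer-table pattern; the member names (opt_check, test_setup, mempool_setup, test_result, test_destroy) are taken from other hits in this listing, but the exact layout and return types of the real struct are not visible here, so treat this as illustrative.

/* Illustrative callback table; not the real evt_test_ops definition. */
struct evt_test;
struct evt_options;

typedef int (*evt_opt_check_t)(struct evt_options *opt);
typedef int (*evt_test_op_t)(struct evt_test *test, struct evt_options *opt);

struct evt_test_ops_sketch {
    evt_opt_check_t opt_check;     /* validate parsed options: ops.opt_check(&opt) */
    evt_test_op_t   test_setup;    /* allocate per-test state */
    evt_test_op_t   mempool_setup; /* create mbuf/event pools */
    evt_test_op_t   test_result;   /* report throughput/latency */
    evt_test_op_t   test_destroy;  /* free per-test state */
};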
|
test_order_common.h
    62 struct evt_options *opt; member
    89 order_nb_event_ports(struct evt_options *opt) in order_nb_event_ports() argument
    91 return evt_nr_active_lcores(opt->wlcores) + 1 /* producer */; in order_nb_event_ports()
    130 struct evt_options *opt = t->opt;\
    136 if (opt->verbose_level > 1)\
    140 int order_test_result(struct evt_test *test, struct evt_options *opt);
    141 int order_opt_check(struct evt_options *opt);
    142 int order_test_setup(struct evt_test *test, struct evt_options *opt);
    143 int order_mempool_setup(struct evt_test *test, struct evt_options *opt);
    144 int order_launch_lcores(struct evt_test *test, struct evt_options *opt,
    [all …]
|
test_pipeline_common.h
    52 struct evt_options *opt; member
    79 const uint8_t last_queue = t->opt->nb_stages - 1; \
    81 const uint8_t nb_stages = t->opt->nb_stages + 1; \
    91 const uint8_t last_queue = t->opt->nb_stages - 1; \
    93 const uint8_t nb_stages = t->opt->nb_stages + 1; \
    187 pipeline_nb_event_ports(struct evt_options *opt) in pipeline_nb_event_ports() argument
    189 return evt_nr_active_lcores(opt->wlcores); in pipeline_nb_event_ports()
    193 int pipeline_opt_check(struct evt_options *opt, uint64_t nb_queues);
    194 int pipeline_test_setup(struct evt_test *test, struct evt_options *opt);
    198 int pipeline_event_tx_adapter_setup(struct evt_options *opt,
    [all …]
|
test_pipeline_atq.c
    13 RTE_SET_USED(opt); in pipeline_atq_nb_event_queues()
    525 struct evt_options *opt = w->t->opt; in worker_wrapper() local
    528 const uint8_t nb_stages = opt->nb_stages; in worker_wrapper()
    607 .nb_atomic_flows = opt->nb_flows, in pipeline_atq_eventdev_setup()
    642 .dequeue_depth = opt->wkr_deq_dep, in pipeline_atq_eventdev_setup()
    721 ret = rte_event_dev_start(opt->dev_id); in pipeline_atq_eventdev_setup()
    760 pipeline_opt_dump(opt, pipeline_atq_nb_event_queues(opt)); in pipeline_atq_opt_dump()
    766 return pipeline_opt_check(opt, pipeline_atq_nb_event_queues(opt)); in pipeline_atq_opt_check()
    777 evt_nr_active_lcores(opt->wlcores)) { in pipeline_atq_capability_check()
    779 pipeline_atq_nb_event_queues(opt), in pipeline_atq_capability_check()
    [all …]
|
test_pipeline_queue.c
    15 return (eth_count * opt->nb_stages) + eth_count; in pipeline_queue_nb_event_queues()
    569 struct evt_options *opt = w->t->opt; in worker_wrapper() local
    621 int nb_stages = opt->nb_stages; in pipeline_queue_eventdev_setup()
    649 .nb_atomic_flows = opt->nb_flows, in pipeline_queue_eventdev_setup()
    683 .dequeue_depth = opt->wkr_deq_dep, in pipeline_queue_eventdev_setup()
    753 ret = rte_event_dev_start(opt->dev_id); in pipeline_queue_eventdev_setup()
    793 pipeline_opt_dump(opt, pipeline_queue_nb_event_queues(opt)); in pipeline_queue_opt_dump()
    799 return pipeline_opt_check(opt, pipeline_queue_nb_event_queues(opt)); in pipeline_queue_opt_check()
    810 evt_nr_active_lcores(opt->wlcores)) { in pipeline_queue_capability_check()
    812 pipeline_queue_nb_event_queues(opt), in pipeline_queue_capability_check()
    [all …]
|
evt_common.h
    168 ret = rte_event_dev_info_get(opt->dev_id, &info); in evt_configure_eventdev()
    174 if (opt->deq_tmo_nsec) { in evt_configure_eventdev()
    175 if (opt->deq_tmo_nsec < info.min_dequeue_timeout_ns) { in evt_configure_eventdev()
    176 opt->deq_tmo_nsec = info.min_dequeue_timeout_ns; in evt_configure_eventdev()
    178 opt->deq_tmo_nsec); in evt_configure_eventdev()
    180 if (opt->deq_tmo_nsec > info.max_dequeue_timeout_ns) { in evt_configure_eventdev()
    181 opt->deq_tmo_nsec = info.max_dequeue_timeout_ns; in evt_configure_eventdev()
    183 opt->deq_tmo_nsec); in evt_configure_eventdev()
    188 .dequeue_timeout_ns = opt->deq_tmo_nsec, in evt_configure_eventdev()
    193 .nb_event_queue_flows = opt->nb_flows, in evt_configure_eventdev()
    [all …]
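
These evt_configure_eventdev() hits show the requested dequeue timeout being clamped to the range the device advertises before the device is configured. A small sketch of just that clamp, using the public eventdev API (rte_event_dev_info_get() and the min/max dequeue timeout fields of struct rte_event_dev_info):

#include <rte_eventdev.h>

/* Pull a requested dequeue timeout into the device's advertised
 * [min, max] range; 0 is treated as "no timeout requested", as above. */
static int
clamp_deq_timeout(uint8_t dev_id, uint64_t *deq_tmo_nsec)
{
    struct rte_event_dev_info info;
    int ret = rte_event_dev_info_get(dev_id, &info);

    if (ret)
        return ret;
    if (*deq_tmo_nsec == 0)
        return 0;
    if (*deq_tmo_nsec < info.min_dequeue_timeout_ns)
        *deq_tmo_nsec = info.min_dequeue_timeout_ns;
    if (*deq_tmo_nsec > info.max_dequeue_timeout_ns)
        *deq_tmo_nsec = info.max_dequeue_timeout_ns;
    return 0;
}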
|
/dpdk/lib/pcapng/
rte_pcapng.c
    124 opt = pcapng_add_option(opt, PCAPNG_OPT_COMMENT, in pcapng_section_block()
    127 opt = pcapng_add_option(opt, PCAPNG_SHB_HARDWARE, in pcapng_section_block()
    130 opt = pcapng_add_option(opt, PCAPNG_SHB_OS, in pcapng_section_block()
    133 opt = pcapng_add_option(opt, PCAPNG_SHB_USERAPPL, in pcapng_section_block()
    216 opt = pcapng_add_option(opt, PCAPNG_IFB_TSRESOL, in pcapng_add_interface()
    218 opt = pcapng_add_option(opt, PCAPNG_IFB_NAME, in pcapng_add_interface()
    221 opt = pcapng_add_option(opt, PCAPNG_IFB_MACADDR, in pcapng_add_interface()
    224 opt = pcapng_add_option(opt, PCAPNG_IFB_SPEED, in pcapng_add_interface()
    306 opt = pcapng_add_option(opt, PCAPNG_ISB_IFRECV, in rte_pcapng_write_stats()
    482 opt = pcapng_add_option(opt, PCAPNG_EPB_FLAGS, in rte_pcapng_copy()
    [all …]
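
The pcapng hits all follow one cursor-style pattern: each pcapng_add_option() call appends a type/length/value option to the block being built and returns the write position for the next one. The sketch below shows a plausible shape for such a helper (16-bit code, 16-bit length, data padded to a 32-bit boundary, as the pcapng format requires); it is not the library's internal implementation, and the name opt_append is made up.

#include <stdint.h>
#include <string.h>

struct pcapng_opt_hdr {
    uint16_t code;    /* option type */
    uint16_t length;  /* unpadded data length */
};

/* Append one option at 'cursor' and return the position after its padding. */
static void *
opt_append(void *cursor, uint16_t code, const void *data, uint16_t len)
{
    struct pcapng_opt_hdr *hdr = cursor;
    uint8_t *payload = (uint8_t *)(hdr + 1);
    uint16_t padded = (uint16_t)((len + 3u) & ~3u); /* pad to 32-bit boundary */

    hdr->code = code;
    hdr->length = len;
    memcpy(payload, data, len);
    memset(payload + len, 0, padded - len);         /* zero the pad bytes */
    return payload + padded;
}

Chaining calls like "opt = opt_append(opt, ...)" then builds the whole option list with a single moving cursor, which is exactly the shape the hits above display.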
|
/dpdk/drivers/common/sfc_efx/base/
efx_bootcfg.c
    23 (((opt) > EFX_DHCP_PAD) && ((opt) < EFX_DHCP_END))
    29 #define DHCP_IS_ENCAP_OPT(opt) DHCP_OPT_HAS_VALUE(DHCP_ENCAPSULATOR(opt)) argument
    226 __in uint16_t opt) in efx_dhcp_walk_tags() argument
    244 opt = DHCP_ENCAPSULATED(opt); in efx_dhcp_walk_tags()
    316 __in uint16_t opt, in efx_dhcp_find_tag() argument
    383 __in uint16_t opt) in efx_dhcp_delete_tag() argument
    463 __in uint16_t opt, in efx_dhcp_write_tag() argument
    484 __in uint16_t opt, in efx_dhcp_add_tag() argument
    529 opt); in efx_dhcp_add_tag()
    545 opt = DHCP_ENCAPSULATED(opt); in efx_dhcp_add_tag()
    [all …]
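
The efx_bootcfg.c hits revolve around two ideas: DHCP options other than PAD and END carry a length and value, and an "encapsulated" tag combines an outer (encapsulating) option with an inner one. The sketch below restates those checks assuming the standard DHCP codes PAD = 0 and END = 255 and a 16-bit tag with the encapsulator in the high byte; the encoding is an assumption for illustration, not copied from the driver.

#include <stdint.h>

#define SKETCH_DHCP_PAD  0u
#define SKETCH_DHCP_END  255u

/* PAD and END are one-byte markers; every other option has a length+value. */
#define SKETCH_OPT_HAS_VALUE(opt) \
    (((opt) > SKETCH_DHCP_PAD) && ((opt) < SKETCH_DHCP_END))

#define SKETCH_ENCAPSULATOR(opt)  (((opt) >> 8) & 0xffu)  /* outer tag */
#define SKETCH_ENCAPSULATED(opt)  ((opt) & 0xffu)         /* inner tag */

/* A tag is "encapsulated" when its outer tag is itself a real option,
 * so walkers like efx_dhcp_walk_tags() can descend one level into it. */
#define SKETCH_IS_ENCAP_OPT(opt) \
    SKETCH_OPT_HAS_VALUE(SKETCH_ENCAPSULATOR(opt))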
|
/dpdk/drivers/common/dpaax/caamflib/rta/
move_cmd.h
    81 uint16_t *offset, uint16_t *opt);
    92 uint16_t offset = 0, opt = 0; in rta_move() local
    123 &offset, &opt); in rta_move()
    130 if (opt == MOVE_SET_AUX_SRC) { in rta_move()
    137 if (opt & MOVE_SET_AUX_SRC) in rta_move()
    183 if (opt == MOVE_SET_LEN_16b) in rta_move()
    239 *opt = MOVE_SET_AUX_SRC; in set_move_offset()
    249 *opt = MOVE_SET_AUX_LS; in set_move_offset()
    290 *opt = MOVE_SET_AUX_DST; in set_move_offset()
    293 *opt = MOVE_SET_AUX_MATH_DST; in set_move_offset()
    [all …]
|
/dpdk/lib/eal/windows/
eal.c
    101 int opt; in eal_log_level_parse() local
    117 if (opt == '?') in eal_log_level_parse()
    120 ret = (opt == OPT_LOG_LEVEL_NUM) ? in eal_log_level_parse()
    136 int opt, ret; in eal_parse_args() local
    151 if (opt == '?') { in eal_parse_args()
    157 if (opt == OPT_LOG_LEVEL_NUM) in eal_parse_args()
    170 switch (opt) { in eal_parse_args()
    175 if (opt < OPT_LONG_MIN_NUM && isprint(opt)) { in eal_parse_args()
    177 "on Windows\n", opt); in eal_parse_args()
    179 opt < OPT_LONG_MAX_NUM) { in eal_parse_args()
    [all …]
|
/dpdk/drivers/common/dpaax/caamflib/
rta.h
    274 #define MOVE(program, src, src_offset, dst, dst_offset, length, opt) \ argument
    275 rta_move(program, __MOVE, src, src_offset, dst, dst_offset, length, opt)
    302 #define MOVEB(program, src, src_offset, dst, dst_offset, length, opt) \ argument
    304 opt)
    331 #define MOVEDW(program, src, src_offset, dst, dst_offset, length, opt) \ argument
    333 opt)
    704 length, opt)
    731 #define MATHI(program, operand, operator, imm, result, length, opt) \ argument
    733 opt)
    753 #define MATHU(program, operand1, operator, result, length, opt) \ argument
    [all …]
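
The rta.h hits are program-builder macros (MOVE, MOVEB, MOVEDW, MATHI, ...) that each pin one opcode selector and forward the rest of their arguments to a single builder function such as rta_move(). A self-contained sketch of that forwarding pattern, with made-up names:

/* Illustrative forwarding pattern only; names are not from rta.h. */
enum cmd_kind { CMD_MOVE, CMD_MOVEB, CMD_MOVEDW };

static inline int
emit_move(enum cmd_kind kind, unsigned int src, unsigned int src_offset,
          unsigned int dst, unsigned int dst_offset,
          unsigned int length, unsigned int opt)
{
    /* A real builder would encode kind/opt into descriptor words here. */
    (void)kind; (void)src; (void)src_offset;
    (void)dst; (void)dst_offset; (void)length; (void)opt;
    return 0;
}

#define MOVE_SKETCH(src, src_offset, dst, dst_offset, length, opt) \
    emit_move(CMD_MOVE, src, src_offset, dst, dst_offset, length, opt)
#define MOVEB_SKETCH(src, src_offset, dst, dst_offset, length, opt) \
    emit_move(CMD_MOVEB, src, src_offset, dst, dst_offset, length, opt)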
|
/dpdk/lib/eal/freebsd/
eal.c
    369 int opt; in eal_log_level_parse() local
    389 if (opt == '?') in eal_log_level_parse()
    392 ret = (opt == OPT_LOG_LEVEL_NUM) ? in eal_log_level_parse()
    411 int opt, ret; in eal_parse_args() local
    430 if (opt == '?') { in eal_parse_args()
    437 if (opt == OPT_LOG_LEVEL_NUM) in eal_parse_args()
    451 switch (opt) { in eal_parse_args()
    470 if (opt < OPT_LONG_MIN_NUM && isprint(opt)) { in eal_parse_args()
    472 "on FreeBSD\n", opt); in eal_parse_args()
    474 opt < OPT_LONG_MAX_NUM) { in eal_parse_args()
    [all …]
|
/dpdk/examples/multi_process/client_server_mp/mp_server/
args.c
    108 int option_index, opt; in parse_app_args() local
    115 while ((opt = getopt_long(argc, argvopt, "n:p:", lgopts, in parse_app_args()
    117 switch (opt){ in parse_app_args()
    131 printf("ERROR: Unknown option '%c'\n", opt); in parse_app_args()
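
parse_app_args() here is a plain getopt_long() loop over the short options "n:p:". Below is a runnable, self-contained illustration of that loop; the long-option table and the meanings given to -n/-p are illustrative rather than copied from args.c.

#include <getopt.h>
#include <stdio.h>
#include <stdlib.h>

int
main(int argc, char **argv)
{
    /* Illustrative long-option table; the real app defines its own. */
    static const struct option lgopts[] = {
        {"num-clients", required_argument, NULL, 'n'},
        {NULL, 0, NULL, 0}
    };
    int opt, option_index;
    unsigned long num_clients = 0, portmask = 0;

    while ((opt = getopt_long(argc, argv, "n:p:", lgopts,
                              &option_index)) != -1) {
        switch (opt) {
        case 'n':
            num_clients = strtoul(optarg, NULL, 10);
            break;
        case 'p':
            portmask = strtoul(optarg, NULL, 16); /* hex port mask */
            break;
        default:
            printf("ERROR: Unknown option '%c'\n", opt);
            return EXIT_FAILURE;
        }
    }
    printf("clients=%lu portmask=0x%lx\n", num_clients, portmask);
    return EXIT_SUCCESS;
}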
|
/dpdk/examples/ethtool/ethtool-app/
ethapp.c
    28 cmdline_fixed_string_t opt; member
    37 cmdline_fixed_string_t opt; member
    124 opt, "all#tx#rx#none");
    224 fp_regs = fopen(params->opt, "wb"); in pcmd_regs_callback()
    227 params->opt); in pcmd_regs_callback()
    233 params->opt); in pcmd_regs_callback()
    264 fp_eeprom = fopen(params->opt, "wb"); in pcmd_eeprom_callback()
    267 params->opt); in pcmd_eeprom_callback()
    337 fp_eeprom = fopen(params->opt, "wb"); in pcmd_module_eeprom_callback()
    346 printf("Error writing '%s'\n", params->opt); in pcmd_module_eeprom_callback()
    [all …]
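
Several of the ethapp.c callbacks share one pattern: open the path passed in params->opt in binary mode, write a blob (register dump, EEPROM contents), and report errors by path. A small sketch of that dump-to-file pattern; the helper name and messages are illustrative.

#include <stdio.h>

/* Write 'len' bytes of 'buf' to 'path', reporting failures by path. */
static int
dump_to_file(const char *path, const void *buf, size_t len)
{
    FILE *fp = fopen(path, "wb");

    if (fp == NULL) {
        printf("Error opening '%s' for writing\n", path);
        return -1;
    }
    if (fwrite(buf, 1, len, fp) != len) {
        printf("Error writing '%s'\n", path);
        fclose(fp);
        return -1;
    }
    fclose(fp);
    return 0;
}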
|