Searched refs:dev_info (Results 1 – 25 of 275) sorted by relevance

/dpdk/kernel/linux/kni/
kni_misc.c
311 if (copy_from_user(&dev_info, (void *)ioctl_param, sizeof(dev_info))) in kni_ioctl_create()
315 if (strnlen(dev_info.name, sizeof(dev_info.name)) == sizeof(dev_info.name)) { in kni_ioctl_create()
323 if (dev_info.force_bind && !cpu_online(dev_info.core_id)) { in kni_ioctl_create()
357 if (dev_info.iova_mode) { in kni_ioctl_create()
411 if (dev_info.mtu) in kni_ioctl_create()
412 net_dev->mtu = dev_info.mtu; in kni_ioctl_create()
416 if (dev_info.min_mtu) in kni_ioctl_create()
419 if (dev_info.max_mtu) in kni_ioctl_create()
426 ret, dev_info.name); in kni_ioctl_create()
458 if (copy_from_user(&dev_info, (void *)ioctl_param, sizeof(dev_info))) in kni_ioctl_release()
[all …]
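
The kni_misc.c hits are the kernel side of KNI's create ioctl: the handler copies a struct rte_kni_device_info in from user space and validates it before any device state is touched. A minimal sketch of that copy-then-validate pattern, assuming a heavily simplified handler (the real kni_ioctl_create() does far more):

    #include <linux/uaccess.h>    /* copy_from_user() */
    #include <linux/string.h>     /* strnlen() */
    #include <linux/cpumask.h>    /* cpu_online() */
    #include <linux/errno.h>
    #include <rte_kni_common.h>   /* struct rte_kni_device_info */

    /* Simplified stand-in for kni_ioctl_create(); only the checks
     * visible in the hits above are kept. */
    static int example_ioctl_create(unsigned long ioctl_param)
    {
        struct rte_kni_device_info dev_info;

        if (copy_from_user(&dev_info, (void *)ioctl_param,
                           sizeof(dev_info)))
            return -EFAULT;

        /* A name that fills the whole buffer has no NUL terminator. */
        if (strnlen(dev_info.name, sizeof(dev_info.name)) ==
                sizeof(dev_info.name))
            return -EINVAL;

        /* Do not bind the kernel thread to an offline core. */
        if (dev_info.force_bind && !cpu_online(dev_info.core_id))
            return -EINVAL;

        return 0;
    }
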
/dpdk/lib/eventdev/
rte_event_eth_rx_adapter.c
400 dev_info->rx_queue && in rxa_polled_queue()
475 dev_info->nb_rx_intr; in rxa_calc_nb_post_add_intr()
529 dev_info->nb_rx_poll; in rxa_calc_nb_post_add_poll()
531 - dev_info->wrr_len; in rxa_calc_nb_post_add_poll()
663 dev_info->wrr_len = 0; in rxa_calc_wrr_sequence()
666 &dev_info->rx_queue[q]; in rxa_calc_wrr_sequence()
1274 dev_info->next_q_idx = dev_info->multi_intr_cap ? in rxa_intr_ring_dequeue()
1729 dev_info->intr_queue = in rxa_config_intr()
2178 dev_info->rx_queue = in rxa_sw_add()
2661 dev_info->rx_queue = in rte_event_eth_rx_adapter_queue_add()
[all …]
rte_event_crypto_adapter.c
921 dev_info->qpairs = in eca_add_queue_pair()
929 qpairs = dev_info->qpairs; in eca_add_queue_pair()
1012 dev_info->qpairs = in rte_event_crypto_adapter_queue_pair_add()
1022 dev_info->dev, in rte_event_crypto_adapter_queue_pair_add()
1114 dev_info->dev, in rte_event_crypto_adapter_queue_pair_del()
1123 dev_info->qpairs = NULL; in rte_event_crypto_adapter_queue_pair_del()
1143 dev_info->qpairs = NULL; in rte_event_crypto_adapter_queue_pair_del()
1187 &dev_info->dev[i]) : in eca_adapter_ctrl()
1189 &dev_info->dev[i]); in eca_adapter_ctrl()
1245 dev_info->dev, in rte_event_crypto_adapter_stats_get()
[all …]
/dpdk/drivers/bus/pci/windows/
pci.c
174 get_device_pci_address(HDEVINFO dev_info, in get_device_pci_address() argument
205 get_device_resource_info(HDEVINFO dev_info, in get_device_resource_info() argument
244 res = SetupDiGetDevicePropertyW(dev_info, dev_info_data, in get_device_resource_info()
358 ret = get_pci_hardware_id(dev_info, device_info_data, in pci_scan_one()
434 HDEVINFO dev_info; in rte_pci_scan() local
441 dev_info = SetupDiGetClassDevs(NULL, TEXT("PCI"), NULL, in rte_pci_scan()
443 if (dev_info == INVALID_HANDLE_VALUE) { in rte_pci_scan()
452 while (SetupDiEnumDeviceInfo(dev_info, device_index, in rte_pci_scan()
460 ret = pci_scan_one(dev_info, &device_info_data); in rte_pci_scan()
473 if (dev_info != INVALID_HANDLE_VALUE) in rte_pci_scan()
[all …]
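
In these pci.c hits, dev_info is not a DPDK struct at all but a Windows SetupAPI device-information-set handle (HDEVINFO). A minimal sketch of the enumeration loop rte_pci_scan() is built on, reduced to the SetupAPI calls visible above (link against setupapi.lib):

    #include <windows.h>
    #include <setupapi.h>
    #include <string.h>

    /* Enumerate present PCI devices the way rte_pci_scan() does;
     * the per-device work is elided. */
    static int example_pci_scan(void)
    {
        HDEVINFO dev_info;
        SP_DEVINFO_DATA device_info_data;
        DWORD device_index = 0;

        dev_info = SetupDiGetClassDevs(NULL, TEXT("PCI"), NULL,
                DIGCF_PRESENT | DIGCF_ALLCLASSES);
        if (dev_info == INVALID_HANDLE_VALUE)
            return -1;

        memset(&device_info_data, 0, sizeof(device_info_data));
        device_info_data.cbSize = sizeof(SP_DEVINFO_DATA);

        while (SetupDiEnumDeviceInfo(dev_info, device_index,
                &device_info_data)) {
            /* a real scan would call pci_scan_one() here */
            device_index++;
        }

        SetupDiDestroyDeviceInfoList(dev_info);
        return 0;
    }
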
/dpdk/drivers/net/bnxt/
rte_pmd_bnxt.c
138 struct rte_eth_dev_info dev_info; in rte_pmd_bnxt_set_vf_mac_addr() local
264 if (vf >= dev_info.max_vfs) in rte_pmd_bnxt_set_vf_mac_anti_spoof()
323 if (vf >= dev_info.max_vfs) in rte_pmd_bnxt_set_vf_vlan_anti_spoof()
374 if (vf >= dev_info.max_vfs) in rte_pmd_bnxt_set_vf_vlan_stripq()
609 if (vf_id >= dev_info.max_vfs) in rte_pmd_bnxt_get_vf_stats()
645 if (vf_id >= dev_info.max_vfs) in rte_pmd_bnxt_reset_vf_stats()
679 if (vf_id >= dev_info.max_vfs) in rte_pmd_bnxt_get_vf_rx_status()
714 if (vf_id >= dev_info.max_vfs) in rte_pmd_bnxt_get_vf_tx_drop_count()
753 if (vf_id >= dev_info.max_vfs) in rte_pmd_bnxt_mac_addr_add()
836 if (vf >= dev_info.max_vfs) in rte_pmd_bnxt_set_vf_vlan_insert()
[all …]
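
Nearly every rte_pmd_bnxt.c hit is the same guard: fetch the port's rte_eth_dev_info and reject VF ids at or beyond max_vfs. A minimal sketch of that check as a hypothetical helper:

    #include <errno.h>
    #include <rte_ethdev.h>

    /* The VF-id bounds check repeated across rte_pmd_bnxt.c. */
    static int example_check_vf(uint16_t port_id, uint16_t vf_id)
    {
        struct rte_eth_dev_info dev_info;
        int ret;

        ret = rte_eth_dev_info_get(port_id, &dev_info);
        if (ret != 0)
            return ret;

        if (vf_id >= dev_info.max_vfs)
            return -EINVAL;    /* no such VF on this port */

        return 0;
    }
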
/dpdk/app/test-eventdev/
evt_common.h
88 struct rte_event_dev_info dev_info; in evt_has_distributed_sched() local
90 rte_event_dev_info_get(dev_id, &dev_info); in evt_has_distributed_sched()
98 struct rte_event_dev_info dev_info; in evt_has_burst_mode() local
100 rte_event_dev_info_get(dev_id, &dev_info); in evt_has_burst_mode()
101 return (dev_info.event_dev_cap & RTE_EVENT_DEV_CAP_BURST_MODE) ? in evt_has_burst_mode()
109 struct rte_event_dev_info dev_info; in evt_has_all_types_queue() local
111 rte_event_dev_info_get(dev_id, &dev_info); in evt_has_all_types_queue()
112 return (dev_info.event_dev_cap & RTE_EVENT_DEV_CAP_QUEUE_ALL_TYPES) ? in evt_has_all_types_queue()
119 struct rte_event_dev_info dev_info; in evt_has_flow_id() local
121 rte_event_dev_info_get(dev_id, &dev_info); in evt_has_flow_id()
[all …]
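
The evt_common.h helpers all share one shape: query rte_event_dev_info and test a capability bit in event_dev_cap. A minimal sketch using the RTE_EVENT_DEV_CAP_BURST_MODE flag seen above:

    #include <stdbool.h>
    #include <rte_eventdev.h>

    /* Capability probe in the style of evt_has_burst_mode(). */
    static bool example_has_burst_mode(uint8_t dev_id)
    {
        struct rte_event_dev_info dev_info;

        rte_event_dev_info_get(dev_id, &dev_info);
        return (dev_info.event_dev_cap & RTE_EVENT_DEV_CAP_BURST_MODE) ?
                true : false;
    }
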
test_perf_atq.c
203 struct rte_event_dev_info dev_info; in perf_atq_eventdev_setup() local
213 memset(&dev_info, 0, sizeof(struct rte_event_dev_info)); in perf_atq_eventdev_setup()
214 ret = rte_event_dev_info_get(opt->dev_id, &dev_info); in perf_atq_eventdev_setup()
242 opt->wkr_deq_dep = dev_info.max_event_port_dequeue_depth; in perf_atq_eventdev_setup()
247 .enqueue_depth = dev_info.max_event_port_dequeue_depth, in perf_atq_eventdev_setup()
248 .new_event_threshold = dev_info.max_num_events, in perf_atq_eventdev_setup()
331 struct rte_event_dev_info dev_info; in perf_atq_capability_check() local
333 rte_event_dev_info_get(opt->dev_id, &dev_info); in perf_atq_capability_check()
335 dev_info.max_event_ports < perf_nb_event_ports(opt)) { in perf_atq_capability_check()
337 atq_nb_event_queues(opt), dev_info.max_event_queues, in perf_atq_capability_check()
[all …]
/dpdk/examples/l3fwd/
l3fwd_event_internal_port.c
20 struct rte_event_dev_info dev_info; in l3fwd_event_device_setup_internal_port() local
35 rte_event_dev_info_get(event_d_id, &dev_info); in l3fwd_event_device_setup_internal_port()
49 event_d_conf.nb_events_limit = dev_info.max_num_events; in l3fwd_event_device_setup_internal_port()
53 dev_info.max_event_queue_flows; in l3fwd_event_device_setup_internal_port()
55 if (dev_info.max_event_port_dequeue_depth < in l3fwd_event_device_setup_internal_port()
58 dev_info.max_event_port_dequeue_depth; in l3fwd_event_device_setup_internal_port()
60 if (dev_info.max_event_port_enqueue_depth < in l3fwd_event_device_setup_internal_port()
63 dev_info.max_event_port_enqueue_depth; in l3fwd_event_device_setup_internal_port()
66 if (dev_info.max_event_ports < num_workers) in l3fwd_event_device_setup_internal_port()
67 num_workers = dev_info.max_event_ports; in l3fwd_event_device_setup_internal_port()
[all …]
l3fwd_event_generic.c
20 struct rte_event_dev_info dev_info; in l3fwd_event_device_setup_generic() local
35 rte_event_dev_info_get(event_d_id, &dev_info); in l3fwd_event_device_setup_generic()
49 event_d_conf.nb_events_limit = dev_info.max_num_events; in l3fwd_event_device_setup_generic()
53 dev_info.max_event_queue_flows; in l3fwd_event_device_setup_generic()
55 if (dev_info.max_event_port_dequeue_depth < in l3fwd_event_device_setup_generic()
58 dev_info.max_event_port_dequeue_depth; in l3fwd_event_device_setup_generic()
60 if (dev_info.max_event_port_enqueue_depth < in l3fwd_event_device_setup_generic()
63 dev_info.max_event_port_enqueue_depth; in l3fwd_event_device_setup_generic()
66 if (dev_info.max_event_ports < num_workers) in l3fwd_event_device_setup_generic()
67 num_workers = dev_info.max_event_ports; in l3fwd_event_device_setup_generic()
[all …]
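
Both l3fwd setup paths (and the l2fwd-event ones below) clamp the requested eventdev configuration to the limits reported in rte_event_dev_info before calling rte_event_dev_configure(). A minimal sketch of that clamping step; the requested depth is a caller-chosen value:

    #include <rte_eventdev.h>

    /* Clamp a requested config to the device limits, as the l3fwd
     * and l2fwd-event setup helpers do. */
    static void example_clamp_config(uint8_t event_d_id,
            struct rte_event_dev_config *cfg, uint32_t req_deq_depth)
    {
        struct rte_event_dev_info dev_info;

        rte_event_dev_info_get(event_d_id, &dev_info);

        cfg->nb_events_limit = dev_info.max_num_events;

        if (dev_info.max_event_port_dequeue_depth < req_deq_depth)
            cfg->nb_event_port_dequeue_depth =
                    dev_info.max_event_port_dequeue_depth;
        else
            cfg->nb_event_port_dequeue_depth = req_deq_depth;
        /* the enqueue depth is clamped the same way */
    }
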
/dpdk/drivers/net/ixgbe/
ixgbe_vf_representor.c
40 struct rte_eth_dev_info *dev_info) in ixgbe_vf_representor_dev_infos_get() argument
47 dev_info->device = representor->pf_ethdev->device; in ixgbe_vf_representor_dev_infos_get()
49 dev_info->min_rx_bufsize = 1024; in ixgbe_vf_representor_dev_infos_get()
51 dev_info->max_rx_pktlen = 9728; in ixgbe_vf_representor_dev_infos_get()
53 dev_info->max_rx_queues = IXGBE_VF_MAX_RX_QUEUES; in ixgbe_vf_representor_dev_infos_get()
55 dev_info->max_tx_queues = IXGBE_VF_MAX_TX_QUEUES; in ixgbe_vf_representor_dev_infos_get()
58 dev_info->max_mac_addrs = hw->mac.num_rar_entries; in ixgbe_vf_representor_dev_infos_get()
61 dev_info->rx_offload_capa = RTE_ETH_RX_OFFLOAD_VLAN_STRIP | in ixgbe_vf_representor_dev_infos_get()
72 dev_info->speed_capa = in ixgbe_vf_representor_dev_infos_get()
76 dev_info->switch_info.name = in ixgbe_vf_representor_dev_infos_get()
[all …]
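
Here dev_info is the output parameter of the ethdev dev_infos_get callback: the representor fills the caller-supplied struct with fixed limits. A minimal sketch of such a callback; the constants are placeholders rather than the ixgbe values, and the driver-internal ethdev_driver.h header is assumed:

    #include <ethdev_driver.h>    /* struct rte_eth_dev (driver-internal) */

    /* Hypothetical dev_infos_get implementation with placeholder limits. */
    static int example_dev_infos_get(struct rte_eth_dev *ethdev,
            struct rte_eth_dev_info *dev_info)
    {
        dev_info->device = ethdev->device;
        dev_info->min_rx_bufsize = 1024;
        dev_info->max_rx_pktlen = 9728;
        dev_info->max_rx_queues = 4;
        dev_info->max_tx_queues = 4;
        dev_info->rx_offload_capa = RTE_ETH_RX_OFFLOAD_VLAN_STRIP;

        return 0;
    }
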
/dpdk/examples/l2fwd-event/
l2fwd_event_internal_port.c
29 struct rte_event_dev_info dev_info; in l2fwd_event_device_setup_internal_port() local
44 rte_event_dev_info_get(event_d_id, &dev_info); in l2fwd_event_device_setup_internal_port()
58 event_d_conf.nb_events_limit = dev_info.max_num_events; in l2fwd_event_device_setup_internal_port()
62 dev_info.max_event_queue_flows; in l2fwd_event_device_setup_internal_port()
64 if (dev_info.max_event_port_dequeue_depth < in l2fwd_event_device_setup_internal_port()
67 dev_info.max_event_port_dequeue_depth; in l2fwd_event_device_setup_internal_port()
69 if (dev_info.max_event_port_enqueue_depth < in l2fwd_event_device_setup_internal_port()
72 dev_info.max_event_port_enqueue_depth; in l2fwd_event_device_setup_internal_port()
76 if (dev_info.max_event_ports < num_workers) in l2fwd_event_device_setup_internal_port()
77 num_workers = dev_info.max_event_ports; in l2fwd_event_device_setup_internal_port()
[all …]
l2fwd_event_generic.c
31 struct rte_event_dev_info dev_info; in l2fwd_event_device_setup_generic() local
46 rte_event_dev_info_get(event_d_id, &dev_info); in l2fwd_event_device_setup_generic()
61 event_d_conf.nb_events_limit = dev_info.max_num_events; in l2fwd_event_device_setup_generic()
65 dev_info.max_event_queue_flows; in l2fwd_event_device_setup_generic()
67 if (dev_info.max_event_port_dequeue_depth < in l2fwd_event_device_setup_generic()
70 dev_info.max_event_port_dequeue_depth; in l2fwd_event_device_setup_generic()
72 if (dev_info.max_event_port_enqueue_depth < in l2fwd_event_device_setup_generic()
75 dev_info.max_event_port_enqueue_depth; in l2fwd_event_device_setup_generic()
79 if (dev_info.max_event_ports < num_workers) in l2fwd_event_device_setup_generic()
80 num_workers = dev_info.max_event_ports; in l2fwd_event_device_setup_generic()
[all …]
/dpdk/examples/eventdev_pipeline/
pipeline_worker_generic.c
168 struct rte_event_dev_info dev_info; in setup_eventdev_generic() local
180 if (dev_info.max_event_port_dequeue_depth < in setup_eventdev_generic()
183 dev_info.max_event_port_dequeue_depth; in setup_eventdev_generic()
187 dev_info.max_event_port_enqueue_depth; in setup_eventdev_generic()
305 struct rte_eth_dev_info dev_info; in port_init() local
325 rx_conf = dev_info.default_rxconf; in port_init()
329 dev_info.flow_type_rss_offloads; in port_init()
353 txconf = dev_info.default_txconf; in port_init()
414 struct rte_event_dev_info dev_info; in init_adapters() local
429 dev_info.max_event_port_dequeue_depth; in init_adapters()
[all …]
/dpdk/drivers/net/ice/
ice_dcf_vf_representor.c
127 struct rte_eth_dev_info *dev_info) in ice_dcf_vf_repr_dev_info_get() argument
135 dev_info->device = dev->device; in ice_dcf_vf_repr_dev_info_get()
136 dev_info->max_mac_addrs = 1; in ice_dcf_vf_repr_dev_info_get()
139 dev_info->min_rx_bufsize = ICE_BUF_SIZE_MIN; in ice_dcf_vf_repr_dev_info_get()
140 dev_info->max_rx_pktlen = ICE_FRAME_SIZE_MAX; in ice_dcf_vf_repr_dev_info_get()
142 dev_info->reta_size = dcf_hw->vf_res->rss_lut_size; in ice_dcf_vf_repr_dev_info_get()
145 dev_info->rx_offload_capa = in ice_dcf_vf_repr_dev_info_get()
155 dev_info->tx_offload_capa = in ice_dcf_vf_repr_dev_info_get()
191 dev_info->rx_desc_lim = (struct rte_eth_desc_lim) { in ice_dcf_vf_repr_dev_info_get()
197 dev_info->tx_desc_lim = (struct rte_eth_desc_lim) { in ice_dcf_vf_repr_dev_info_get()
[all …]
/dpdk/drivers/raw/ifpga/base/
ifpga_fme_pr.c
24 dev_info(NULL, "%s\n", pr_err_msg[i]); in pr_err_handle()
46 dev_info(fme_dev, "resetting PR before initiated PR\n"); in fme_pr_write_init()
97 dev_info(fme_dev, "set PR port ID and start request\n"); in fme_pr_write()
104 dev_info(fme_dev, "pushing data from bitstream to HW\n"); in fme_pr_write()
164 dev_info(fme_dev, "green bitstream push complete\n"); in fme_pr_write_complete()
181 dev_info(fme_dev, "PR done successfully\n"); in fme_pr_write_complete()
300 dev_info(hw, "this is a valid bitsteam..\n"); in do_pr()
329 dev_info(NULL, "FME PR MGMT Init.\n"); in fme_pr_mgmt_init()
337 dev_info(NULL, "using 512-bit PR\n"); in fme_pr_mgmt_init()
340 dev_info(NULL, "using 32-bit PR\n"); in fme_pr_mgmt_init()
[all …]
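
Unlike every other group on this page, dev_info in the ifpga base code is not a variable: it is a printf-style logging macro from the driver's OS-dependent compatibility layer, which is why these hits read as log statements. A hypothetical stand-in for illustration only (the real definition lives in the ifpga osdep headers):

    #include <stdio.h>

    /* Hypothetical dev_info() logging macro in the ifpga style. */
    #define dev_info(dev, fmt, ...) \
        printf("ifpga info (%p): " fmt, (const void *)(dev), ##__VA_ARGS__)
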
/dpdk/drivers/net/i40e/
i40e_vf_representor.c
29 struct rte_eth_dev_info *dev_info) in i40e_vf_representor_dev_infos_get() argument
36 dev_info->device = ethdev->device; in i40e_vf_representor_dev_infos_get()
40 dev_info->max_rx_queues = ethdev->data->nb_rx_queues; in i40e_vf_representor_dev_infos_get()
41 dev_info->max_tx_queues = ethdev->data->nb_tx_queues; in i40e_vf_representor_dev_infos_get()
43 dev_info->min_rx_bufsize = I40E_BUF_SIZE_MIN; in i40e_vf_representor_dev_infos_get()
44 dev_info->max_rx_pktlen = I40E_FRAME_SIZE_MAX; in i40e_vf_representor_dev_infos_get()
47 dev_info->reta_size = RTE_ETH_RSS_RETA_SIZE_64; in i40e_vf_representor_dev_infos_get()
49 dev_info->max_mac_addrs = I40E_NUM_MACADDR_MAX; in i40e_vf_representor_dev_infos_get()
50 dev_info->rx_offload_capa = in i40e_vf_representor_dev_infos_get()
57 dev_info->tx_offload_capa = in i40e_vf_representor_dev_infos_get()
[all …]
/dpdk/lib/dmadev/
rte_dmadev.c
429 dev_info->dev_name = dev->data->dev_name; in rte_dma_info_get()
440 struct rte_dma_info dev_info; in rte_dma_configure() local
453 ret = rte_dma_info_get(dev_id, &dev_info); in rte_dma_configure()
570 struct rte_dma_info dev_info; in rte_dma_vchan_setup() local
584 ret = rte_dma_info_get(dev_id, &dev_info); in rte_dma_vchan_setup()
593 if (vchan >= dev_info.nb_vchans) { in rte_dma_vchan_setup()
628 if (conf->nb_desc < dev_info.min_desc || in rte_dma_vchan_setup()
629 conf->nb_desc > dev_info.max_desc) { in rte_dma_vchan_setup()
762 struct rte_dma_info dev_info; in rte_dma_dump() local
768 ret = rte_dma_info_get(dev_id, &dev_info); in rte_dma_dump()
[all …]
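
rte_dmadev.c validates virtual-channel setup against the descriptor limits the driver reports. A minimal sketch of the same check from the caller's side, for a hypothetical requested descriptor count:

    #include <errno.h>
    #include <rte_dmadev.h>

    /* Check a requested ring size against the device's reported
     * min_desc/max_desc before calling rte_dma_vchan_setup(). */
    static int example_check_nb_desc(int16_t dev_id, uint16_t nb_desc)
    {
        struct rte_dma_info dev_info;
        int ret;

        ret = rte_dma_info_get(dev_id, &dev_info);
        if (ret != 0)
            return ret;

        if (nb_desc < dev_info.min_desc || nb_desc > dev_info.max_desc)
            return -EINVAL;

        return 0;
    }
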
/dpdk/lib/kni/
rte_kni.c
216 struct rte_kni_device_info dev_info; in rte_kni_alloc() local
257 memset(&dev_info, 0, sizeof(dev_info)); in rte_kni_alloc()
258 dev_info.core_id = conf->core_id; in rte_kni_alloc()
260 dev_info.group_id = conf->group_id; in rte_kni_alloc()
261 dev_info.mbuf_size = conf->mbuf_size; in rte_kni_alloc()
262 dev_info.mtu = conf->mtu; in rte_kni_alloc()
263 dev_info.min_mtu = conf->min_mtu; in rte_kni_alloc()
264 dev_info.max_mtu = conf->max_mtu; in rte_kni_alloc()
277 dev_info.tx_phys = kni->m_tx_q->iova; in rte_kni_alloc()
406 struct rte_kni_device_info dev_info; in rte_kni_release() local
[all …]
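
This is the user-space half of the KNI picture: rte_kni_alloc() zeroes a struct rte_kni_device_info and copies the caller's rte_kni_conf fields into it before handing it to the kernel module. A minimal sketch of that fill step, limited to the fields visible in the hits:

    #include <string.h>
    #include <rte_kni.h>
    #include <rte_kni_common.h>

    /* Copy the user-facing conf into the message sent to the kernel. */
    static void example_fill_dev_info(struct rte_kni_device_info *dev_info,
            const struct rte_kni_conf *conf)
    {
        memset(dev_info, 0, sizeof(*dev_info));
        dev_info->core_id = conf->core_id;
        dev_info->group_id = conf->group_id;
        dev_info->mbuf_size = conf->mbuf_size;
        dev_info->mtu = conf->mtu;
        dev_info->min_mtu = conf->min_mtu;
        dev_info->max_mtu = conf->max_mtu;
    }
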
/dpdk/lib/regexdev/
rte_regexdev.c
189 if (dev_info == NULL) in regexdev_info_get()
193 return (*dev->dev_ops->dev_info_get)(dev, dev_info); in regexdev_info_get()
200 return regexdev_info_get(dev_id, dev_info); in rte_regexdev_info_get()
207 struct rte_regexdev_info dev_info; in rte_regexdev_configure() local
221 ret = regexdev_info_get(dev_id, &dev_info); in rte_regexdev_configure()
250 if (cfg->nb_groups > dev_info.max_groups) { in rte_regexdev_configure()
252 dev_id, cfg->nb_groups, dev_info.max_groups); in rte_regexdev_configure()
260 if (cfg->nb_max_matches > dev_info.max_matches) { in rte_regexdev_configure()
263 dev_info.max_matches); in rte_regexdev_configure()
274 dev_info.max_queue_pairs); in rte_regexdev_configure()
[all …]
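
rte_regexdev_configure() checks the requested configuration against the limits in rte_regexdev_info. A minimal sketch of those bounds checks as a hypothetical helper, using only the fields visible in the hits:

    #include <errno.h>
    #include <rte_regexdev.h>

    /* Configure-time bounds checks in the style of rte_regexdev_configure(). */
    static int example_check_cfg(uint8_t dev_id,
            const struct rte_regexdev_config *cfg)
    {
        struct rte_regexdev_info dev_info;
        int ret;

        ret = rte_regexdev_info_get(dev_id, &dev_info);
        if (ret != 0)
            return ret;

        if (cfg->nb_groups > dev_info.max_groups)
            return -EINVAL;
        if (cfg->nb_max_matches > dev_info.max_matches)
            return -EINVAL;

        return 0;
    }
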
/dpdk/app/test/
test_event_eth_tx_adapter.c
234 struct rte_event_dev_info dev_info; in tx_adapter_create() local
252 err = rte_event_dev_info_get(TEST_DEV_ID, &dev_info); in tx_adapter_create()
255 dev_info.max_event_port_dequeue_depth; in tx_adapter_create()
257 dev_info.max_event_port_enqueue_depth; in tx_adapter_create()
259 dev_info.max_num_events; in tx_adapter_create()
276 err = rte_event_dev_info_get(TEST_DEV_ID, &dev_info); in tx_adapter_create()
299 struct rte_event_dev_info dev_info; in tx_adapter_create_free() local
476 struct rte_event_dev_info dev_info; in tx_adapter_service() local
514 dev_info.max_event_port_dequeue_depth; in tx_adapter_service()
516 dev_info.max_event_port_enqueue_depth; in tx_adapter_service()
[all …]
test_dmadev_api.c
223 vchan_conf.nb_desc = dev_info->min_desc; in check_port_type()
231 vchan_conf.nb_desc = dev_info->min_desc; in check_port_type()
244 struct rte_dma_info dev_info = { 0 }; in test_dma_vchan_setup() local
258 dev_conf.nb_vchans = dev_info.max_vchans; in test_dma_vchan_setup()
273 vchan_conf.nb_desc = dev_info.min_desc - 1; in test_dma_vchan_setup()
281 ret = check_port_type(&dev_info); in test_dma_vchan_setup()
287 vchan_conf.nb_desc = dev_info.min_desc; in test_dma_vchan_setup()
298 struct rte_dma_info dev_info = { 0 }; in setup_one_vchan() local
304 dev_conf.nb_vchans = dev_info.max_vchans; in setup_one_vchan()
308 vchan_conf.nb_desc = dev_info.min_desc; in setup_one_vchan()
[all …]
/dpdk/drivers/baseband/null/
bbdev_null.c
69 dev_info->driver_name = RTE_STR(DRIVER_NAME); in info_get()
70 dev_info->max_num_queues = internals->max_nb_queues; in info_get()
71 dev_info->queue_size_lim = RTE_BBDEV_QUEUE_SIZE_LIMIT; in info_get()
72 dev_info->hardware_accelerated = false; in info_get()
73 dev_info->max_dl_queue_priority = 0; in info_get()
74 dev_info->max_ul_queue_priority = 0; in info_get()
75 dev_info->default_queue_conf = default_queue_conf; in info_get()
76 dev_info->capabilities = bbdev_capabilities; in info_get()
77 dev_info->cpu_flag_reqs = NULL; in info_get()
78 dev_info->min_alignment = 0; in info_get()
[all …]
/dpdk/lib/bbdev/
rte_bbdev.c
315 struct rte_bbdev_driver_info dev_info; in rte_bbdev_setup_queues() local
330 memset(&dev_info, 0, sizeof(dev_info)); in rte_bbdev_setup_queues()
460 memset(&dev_info, 0, sizeof(dev_info)); in rte_bbdev_queue_configure()
466 (dev_info.capabilities[0].type == in rte_bbdev_queue_configure()
470 for (p = dev_info.capabilities; in rte_bbdev_queue_configure()
486 dev_info.queue_size_lim); in rte_bbdev_queue_configure()
500 dev_info.max_ul_queue_priority); in rte_bbdev_queue_configure()
508 dev_info.max_dl_queue_priority); in rte_bbdev_queue_configure()
787 if (dev_info == NULL) { in rte_bbdev_info_get()
793 memset(dev_info, 0, sizeof(*dev_info)); in rte_bbdev_info_get()
[all …]
/dpdk/examples/vmdq/
main.c
175 struct rte_eth_dev_info dev_info; in port_init() local
192 retval = rte_eth_dev_info_get(port, &dev_info); in port_init()
217 num_pf_queues = dev_info.max_rx_queues - dev_info.vmdq_queue_num; in port_init()
218 queues_per_pool = dev_info.vmdq_queue_num / dev_info.max_vmdq_pools; in port_init()
221 vmdq_queue_base = dev_info.vmdq_queue_base; in port_init()
222 vmdq_pool_base = dev_info.vmdq_pool_base; in port_init()
234 dev_info.flow_type_rss_offloads; in port_init()
251 rxRings = (uint16_t)dev_info.max_rx_queues; in port_init()
252 txRings = (uint16_t)dev_info.max_tx_queues; in port_init()
279 rxconf = &dev_info.default_rxconf; in port_init()
[all …]
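
The vmdq example derives its whole queue layout from rte_eth_dev_info: the VMDq region's base and size, the queues left over for the PF, and the queues available per pool. A minimal sketch of that arithmetic, with a guard added for devices that report no VMDq pools:

    #include <errno.h>
    #include <rte_ethdev.h>

    /* VMDq queue-layout arithmetic in the style of port_init(). */
    static int example_vmdq_layout(uint16_t port)
    {
        struct rte_eth_dev_info dev_info;
        uint16_t num_pf_queues, queues_per_pool;
        int ret;

        ret = rte_eth_dev_info_get(port, &dev_info);
        if (ret != 0)
            return ret;
        if (dev_info.max_vmdq_pools == 0)
            return -ENOTSUP;    /* device has no VMDq support */

        /* Queues left to the PF after the VMDq region is carved out. */
        num_pf_queues = dev_info.max_rx_queues - dev_info.vmdq_queue_num;
        /* Queues each VMDq pool can use. */
        queues_per_pool = dev_info.vmdq_queue_num / dev_info.max_vmdq_pools;

        return (num_pf_queues > 0 && queues_per_pool > 0) ? 0 : -EINVAL;
    }
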
/dpdk/drivers/event/skeleton/
skeleton_eventdev.c
83 struct rte_event_dev_info *dev_info) in skeleton_eventdev_info_get() argument
91 dev_info->min_dequeue_timeout_ns = 1; in skeleton_eventdev_info_get()
92 dev_info->max_dequeue_timeout_ns = 10000; in skeleton_eventdev_info_get()
93 dev_info->dequeue_timeout_ns = 25; in skeleton_eventdev_info_get()
94 dev_info->max_event_queues = 64; in skeleton_eventdev_info_get()
96 dev_info->max_event_queue_priority_levels = 8; in skeleton_eventdev_info_get()
97 dev_info->max_event_priority_levels = 8; in skeleton_eventdev_info_get()
98 dev_info->max_event_ports = 32; in skeleton_eventdev_info_get()
99 dev_info->max_event_port_dequeue_depth = 16; in skeleton_eventdev_info_get()
100 dev_info->max_event_port_enqueue_depth = 16; in skeleton_eventdev_info_get()
[all …]
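
On the driver side the relationship is inverted: the info_get op fills rte_event_dev_info with the device's static limits, exactly the numbers applications read back through rte_event_dev_info_get(). A minimal sketch, reduced to the struct fill and reusing the skeleton driver's illustrative values:

    #include <rte_eventdev.h>

    /* Fill static device limits, as an eventdev info_get op does. */
    static void example_eventdev_info_get(struct rte_event_dev_info *dev_info)
    {
        dev_info->min_dequeue_timeout_ns = 1;
        dev_info->max_dequeue_timeout_ns = 10000;
        dev_info->dequeue_timeout_ns = 25;
        dev_info->max_event_queues = 64;
        dev_info->max_event_ports = 32;
        dev_info->max_event_port_dequeue_depth = 16;
        dev_info->max_event_port_enqueue_depth = 16;
    }
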
