
Searched refs:numa_node (Results 1 – 25 of 120) sorted by relevance


/dpdk/drivers/net/ring/
rte_eth_ring.c
32 const unsigned int numa_node; member
326 numa_node); in do_eth_dev_ring_create()
329 sizeof(void *), 0, numa_node); in do_eth_dev_ring_create()
336 sizeof(void *), 0, numa_node); in do_eth_dev_ring_create()
391 data->numa_node = numa_node; in do_eth_dev_ring_create()
415 const unsigned int numa_node) in rte_eth_from_rings() argument
422 .numa_node = numa_node, in rte_eth_from_rings()
478 const unsigned int numa_node, in eth_dev_ring_create() argument
501 rte_ring_create(rng_name, 1024, numa_node, in eth_dev_ring_create()
509 numa_node, action, eth_dev) < 0) in eth_dev_ring_create()
[all …]
rte_eth_ring.h
37 const unsigned numa_node);
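
The ring PMD hits above show the driver storing the caller-supplied numa_node and feeding it to rte_zmalloc_socket() and rte_ring_create(). A minimal sketch of the public entry point, rte_eth_from_rings(), whose last argument is that NUMA node (ring name, size and port name below are illustrative, not taken from the source):

    #include <rte_ring.h>
    #include <rte_eth_ring.h>

    /* Sketch: create one ring on a chosen NUMA node and wrap it in a
     * ring ethdev, using the same ring for both rx and tx. */
    static int
    create_ring_port(unsigned int numa_node)
    {
        struct rte_ring *r = rte_ring_create("sketch_ring", 1024, numa_node,
                                             RING_F_SP_ENQ | RING_F_SC_DEQ);

        if (r == NULL)
            return -1;

        /* Returns the new port id, or -1 on failure. */
        return rte_eth_from_rings("net_ring_sketch", &r, 1, &r, 1, numa_node);
    }
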
/dpdk/lib/eal/windows/
eal_hugepages.c
82 unsigned int numa_node; in hugepage_info_init() local
84 numa_node = eal_socket_numa_node(socket_id); in hugepage_info_init()
85 if (!GetNumaAvailableMemoryNodeEx(numa_node, &bytes)) { in hugepage_info_init()
87 numa_node); in hugepage_info_init()
eal_memalloc.c
39 unsigned int numa_node; in alloc_seg() local
62 numa_node = eal_socket_numa_node(socket_id); in alloc_seg()
121 if (page->Node != numa_node) { in alloc_seg()
124 numa_node, socket_id, page->Node); in alloc_seg()
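
For context on the two Windows EAL hits: both paths translate a DPDK socket id into a Windows NUMA node id and then consult the OS. A hedged sketch of the Win32 call used in hugepage_info_init(), GetNumaAvailableMemoryNodeEx(), which reports how much memory is still available on a node (the node id is passed in directly here; in DPDK it comes from an internal socket-to-node lookup):

    #include <windows.h>
    #include <stdio.h>

    /* Sketch: query free memory on a given NUMA node. */
    static int
    print_numa_free_bytes(USHORT numa_node)
    {
        ULONGLONG bytes = 0;

        if (!GetNumaAvailableMemoryNodeEx(numa_node, &bytes)) {
            fprintf(stderr, "no memory info for NUMA node %u\n", numa_node);
            return -1;
        }
        printf("NUMA node %u: %llu bytes available\n",
               numa_node, (unsigned long long)bytes);
        return 0;
    }
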
/dpdk/lib/ethdev/
ethdev_vdev.h
45 dev->device.numa_node); in rte_eth_vdev_allocate()
55 eth_dev->data->numa_node = dev->device.numa_node; in rte_eth_vdev_allocate()
ethdev_pci.h
48 eth_dev->data->numa_node = pci_dev->device.numa_node; in rte_eth_copy_pci_info()
98 dev->device.numa_node); in rte_eth_dev_pci_allocate()
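
The two ethdev helpers above only copy the bus device's numa_node into eth_dev->data->numa_node; later allocations are what make use of it. A hedged sketch of how a PMD typically consumes the stored value (the helper name is illustrative; ethdev_driver.h is the driver-facing header defining struct rte_eth_dev):

    #include <rte_malloc.h>
    #include <ethdev_driver.h>

    /* Sketch: place per-port private data on the port's own NUMA node,
     * as recorded by rte_eth_copy_pci_info()/rte_eth_vdev_allocate(). */
    static void *
    alloc_port_private(struct rte_eth_dev *eth_dev, size_t size)
    {
        return rte_zmalloc_socket("port_private", size, RTE_CACHE_LINE_SIZE,
                                  eth_dev->data->numa_node);
    }
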
/dpdk/lib/table/
rte_swx_table_wm.c
23 env_malloc(size_t size, size_t alignment, int numa_node) in env_malloc() argument
25 return rte_zmalloc_socket(NULL, size, alignment, numa_node); in env_malloc()
39 env_malloc(size_t size, size_t alignment __rte_unused, int numa_node) in env_malloc() argument
41 return numa_alloc_onnode(size, numa_node); in env_malloc()
278 int numa_node) in acl_table_create() argument
310 acl_params.socket_id = numa_node; in acl_table_create()
385 int numa_node) in table_create() argument
402 t = env_malloc(total_size, RTE_CACHE_LINE_SIZE, numa_node); in table_create()
411 t->acl_ctx = acl_table_create(params, entries, n_entries, numa_node); in table_create()
rte_swx_table_learner.c
23 env_calloc(size_t size, size_t alignment, int numa_node) in env_calloc() argument
25 return rte_zmalloc_socket(NULL, size, alignment, numa_node); in env_calloc()
39 env_calloc(size_t size, size_t alignment __rte_unused, int numa_node) in env_calloc() argument
46 start = numa_alloc_onnode(size, numa_node); in env_calloc()
388 rte_swx_table_learner_create(struct rte_swx_table_learner_params *params, int numa_node) in rte_swx_table_learner_create() argument
400 t = env_calloc(p.total_size, RTE_CACHE_LINE_SIZE, numa_node); in rte_swx_table_learner_create()
rte_swx_table_selector.c
18 env_calloc(size_t size, size_t alignment, int numa_node) in env_calloc() argument
20 return rte_zmalloc_socket(NULL, size, alignment, numa_node); in env_calloc()
34 env_calloc(size_t size, size_t alignment __rte_unused, int numa_node) in env_calloc() argument
41 start = numa_alloc_onnode(size, numa_node); in env_calloc()
265 int numa_node) in rte_swx_table_selector_create() argument
290 t->group_table = env_calloc(t->group_table_size, RTE_CACHE_LINE_SIZE, numa_node); in rte_swx_table_selector_create()
rte_swx_table_em.c
28 env_malloc(size_t size, size_t alignment, int numa_node) in env_malloc() argument
30 return rte_zmalloc_socket(NULL, size, alignment, numa_node); in env_malloc()
44 env_malloc(size_t size, size_t alignment __rte_unused, int numa_node) in env_malloc() argument
46 return numa_alloc_onnode(size, numa_node); in env_malloc()
373 int numa_node) in __table_create() argument
423 memory = env_malloc(total_size, RTE_CACHE_LINE_SIZE, numa_node); in __table_create()
787 int numa_node) in table_create() argument
794 status = __table_create(&t, NULL, params, args, numa_node); in table_create()
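
All four rte_swx_table_* files carry the same small allocation wrapper: built inside DPDK it uses rte_zmalloc_socket(), while the standalone build falls back to libnuma's numa_alloc_onnode(). A hedged sketch of that dual-backend pattern (the USE_RTE_ALLOC guard is illustrative; the source selects the backend with its own build macro):

    #include <stddef.h>

    #ifdef USE_RTE_ALLOC
    #include <rte_malloc.h>

    static void *
    env_malloc(size_t size, size_t alignment, int numa_node)
    {
        /* DPDK build: zeroed, NUMA-aware allocation from the DPDK heaps. */
        return rte_zmalloc_socket(NULL, size, alignment, numa_node);
    }

    static void
    env_free(void *start, size_t size)
    {
        (void)size; /* rte_free() does not need the length */
        rte_free(start);
    }
    #else
    #include <numa.h>

    static void *
    env_malloc(size_t size, size_t alignment, int numa_node)
    {
        /* Standalone build: pinned to the requested node; note that the
         * alignment argument is not honoured on this path. */
        (void)alignment;
        return numa_alloc_onnode(size, numa_node);
    }

    static void
    env_free(void *start, size_t size)
    {
        numa_free(start, size);
    }
    #endif
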
/dpdk/drivers/bus/pci/windows/
pci.c
209 DWORD numa_node; in get_device_resource_info() local
243 dev->device.numa_node = SOCKET_ID_ANY; in get_device_resource_info()
246 (BYTE *)&numa_node, sizeof(numa_node), NULL, 0); in get_device_resource_info()
251 dev->device.numa_node = 0; in get_device_resource_info()
258 dev->device.numa_node = numa_node; in get_device_resource_info()
/dpdk/drivers/bus/auxiliary/linux/
auxiliary.c
47 dev->device.numa_node = tmp; in auxiliary_scan_one()
49 dev->device.numa_node = -1; in auxiliary_scan_one()
51 dev->device.numa_node = 0; in auxiliary_scan_one()
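
The Linux auxiliary bus scan above reads the kernel's numa_node attribute for each device, keeping -1 when the attribute cannot be read and forcing 0 on machines without NUMA information; the idxd dsa_scan() hits further down do the same via read_device_int(). A hedged sketch of the underlying sysfs lookup in plain C (the path layout is the standard sysfs one, not copied from the source):

    #include <stdio.h>

    /* Sketch: read /sys/bus/auxiliary/devices/<name>/numa_node.
     * Returns the node id, or -1 if the attribute is missing or unreadable. */
    static int
    aux_dev_numa_node(const char *dev_name)
    {
        char path[256];
        int node = -1;
        FILE *f;

        snprintf(path, sizeof(path),
                 "/sys/bus/auxiliary/devices/%s/numa_node", dev_name);
        f = fopen(path, "r");
        if (f == NULL)
            return -1;
        if (fscanf(f, "%d", &node) != 1)
            node = -1;
        fclose(f);
        return node;
    }
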
/dpdk/lib/dmadev/
rte_dmadev.c
234 dma_allocate_primary(const char *name, int numa_node, size_t private_data_size) in dma_allocate_primary() argument
254 RTE_CACHE_LINE_SIZE, numa_node); in dma_allocate_primary()
271 dev->data->numa_node = numa_node; in dma_allocate_primary()
308 dma_allocate(const char *name, int numa_node, size_t private_data_size) in dma_allocate() argument
313 dev = dma_allocate_primary(name, numa_node, private_data_size); in dma_allocate()
338 rte_dma_pmd_allocate(const char *name, int numa_node, size_t private_data_size) in rte_dma_pmd_allocate() argument
345 dev = dma_allocate(name, numa_node, private_data_size); in rte_dma_pmd_allocate()
430 dev_info->numa_node = dev->device->numa_node; in rte_dma_info_get()
rte_dmadev_pmd.h
99 int16_t numa_node; /**< Local NUMA memory ID. -1 if unknown. */ member
153 struct rte_dma_dev *rte_dma_pmd_allocate(const char *name, int numa_node,
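
On the dmadev side, rte_dma_pmd_allocate() records the driver-supplied NUMA node in the device data, and rte_dma_info_get() later exposes it to applications (-1 meaning unknown, per the rte_dmadev_pmd.h comment above). A short application-side sketch of that query:

    #include <stdio.h>
    #include <rte_dmadev.h>

    /* Sketch: report which NUMA node a DMA device is local to. */
    static void
    print_dma_numa_node(int16_t dev_id)
    {
        struct rte_dma_info info;

        if (rte_dma_info_get(dev_id, &info) != 0) {
            fprintf(stderr, "rte_dma_info_get() failed for dev %d\n", dev_id);
            return;
        }
        printf("dmadev %d: numa_node %d%s\n", dev_id, info.numa_node,
               info.numa_node < 0 ? " (unknown)" : "");
    }
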
/dpdk/drivers/raw/ioat/
idxd_bus.c
212 dev->wq_name, dev->device.numa_node); in idxd_rawdev_probe_dsa()
292 int numa_node = -1; in dsa_scan() local
314 read_device_int(dev, "numa_node", &numa_node); in dsa_scan()
315 dev->device.numa_node = numa_node; in dsa_scan()
ioat_rawdev.c
69 dev->device->numa_node, RTE_MEMZONE_IOVA_CONTIG); in ioat_dev_configure()
161 dev->device.numa_node); in ioat_rawdev_create()
186 dev->device.numa_node, RTE_MEMZONE_IOVA_CONTIG); in ioat_rawdev_create()
287 IOAT_PMD_INFO("Init %s on NUMA node %d", name, dev->device.numa_node); in ioat_rawdev_probe()
303 name, dev->device.numa_node); in ioat_rawdev_remove()
/dpdk/drivers/dma/idxd/
idxd_bus.c
222 dev->wq_name, dev->device.numa_node); in idxd_probe_dsa()
324 int numa_node = -1; in dsa_scan() local
346 read_device_int(dev, "numa_node", &numa_node); in dsa_scan()
347 dev->device.numa_node = numa_node; in dsa_scan()
/dpdk/drivers/bus/auxiliary/
auxiliary_common.c
109 if (dev->device.numa_node < 0) { in rte_auxiliary_probe_one_driver()
113 dev->device.numa_node = 0; in rte_auxiliary_probe_one_driver()
136 drv->driver.name, dev->name, dev->device.numa_node); in rte_auxiliary_probe_one_driver()
164 drv->driver.name, dev->name, dev->device.numa_node); in rte_auxiliary_driver_remove_dev()
/dpdk/drivers/bus/vmbus/
vmbus_common.c
103 guid, dev->device.numa_node); in vmbus_probe_one_driver()
115 if (dev->device.numa_node < 0) { in vmbus_probe_one_driver()
119 dev->device.numa_node = 0; in vmbus_probe_one_driver()
/dpdk/drivers/net/af_packet/
rte_eth_af_packet.c
665 const unsigned int numa_node = dev->device.numa_node; in rte_pmd_init_internals() local
696 name, numa_node); in rte_pmd_init_internals()
699 0, numa_node); in rte_pmd_init_internals()
707 0, numa_node); in rte_pmd_init_internals()
711 0, numa_node); in rte_pmd_init_internals()
843 rx_queue->rd = rte_zmalloc_socket(name, rdsize, 0, numa_node); in rte_pmd_init_internals()
860 tx_queue->rd = rte_zmalloc_socket(name, rdsize, 0, numa_node); in rte_pmd_init_internals()
1095 if (dev->device.numa_node == SOCKET_ID_ANY) in rte_pmd_af_packet_probe()
1096 dev->device.numa_node = rte_socket_id(); in rte_pmd_af_packet_probe()
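
Several probe paths in these results share the fallback seen here in rte_pmd_af_packet_probe() (and in the vhost PMD, auxiliary bus and vmbus hits): when the bus could not determine a node, the device is pinned to the socket of the core running the probe. A minimal sketch of that idiom:

    #include <rte_lcore.h>
    #include <rte_memory.h> /* SOCKET_ID_ANY */

    /* Sketch: turn an "any socket" device node into a concrete node so that
     * later rte_zmalloc_socket() calls have something to target. */
    static int
    resolve_numa_node(int numa_node)
    {
        if (numa_node == SOCKET_ID_ANY)
            return (int)rte_socket_id();
        return numa_node;
    }
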
/dpdk/examples/pipeline/
obj.h
112 uint32_t numa_node; member
165 int numa_node);
/dpdk/drivers/net/vhost/
rte_eth_vhost.c
813 eth_dev->data->numa_node = newnode; in new_device()
1001 unsigned int numa_node = eth_dev->device->numa_node; in vhost_driver_setup() local
1009 list = rte_zmalloc_socket(name, sizeof(*list), 0, numa_node); in vhost_driver_setup()
1014 0, numa_node); in vhost_driver_setup()
1477 numa_node); in eth_dev_vhost_create()
1499 0, numa_node); in eth_dev_vhost_create()
1591 if (dev->device.numa_node == SOCKET_ID_ANY) in rte_pmd_vhost_probe()
1592 dev->device.numa_node = rte_socket_id(); in rte_pmd_vhost_probe()
1686 if (dev->device.numa_node == SOCKET_ID_ANY) in rte_pmd_vhost_probe()
1687 dev->device.numa_node = rte_socket_id(); in rte_pmd_vhost_probe()
[all …]
/dpdk/drivers/net/bonding/
rte_eth_bond_flow.c
17 bond_flow_alloc(int numa_node, const struct rte_flow_attr *attr, in bond_flow_alloc() argument
38 RTE_CACHE_LINE_SIZE, numa_node); in bond_flow_alloc()
94 flow = bond_flow_alloc(dev->data->numa_node, attr, patterns, actions); in bond_flow_create()
/dpdk/lib/vhost/
vhost_user.c
525 if (node == vq->numa_node) in numa_realloc()
612 vq->numa_node = node; in numa_realloc()
1318 numa_node = dev->virtqueue[0]->numa_node; in vhost_user_set_mem_table()
1327 numa_node); in vhost_user_set_mem_table()
1563 numa_node = dev->virtqueue[0]->numa_node; in vhost_user_get_inflight_fd()
1684 numa_node = dev->virtqueue[0]->numa_node; in vhost_user_set_inflight_fd()
1850 0, vq->numa_node); in vhost_check_queue_inflights_split()
1860 0, vq->numa_node); in vhost_check_queue_inflights_split()
1948 0, vq->numa_node); in vhost_check_queue_inflights_packed()
1958 0, vq->numa_node); in vhost_check_queue_inflights_packed()
[all …]
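
The vhost-user hits centre on numa_realloc(): it determines which node actually backs the guest-visible virtqueue memory and, when that differs from vq->numa_node, moves the virtqueue structures to the right node. Looking up the node behind an arbitrary address can be done with libnuma's get_mempolicy(); a hedged sketch of that lookup:

    #include <numaif.h>

    /* Sketch: return the NUMA node backing the page containing addr,
     * or -1 on failure (link with -lnuma). */
    static int
    addr_numa_node(void *addr)
    {
        int node = -1;

        if (get_mempolicy(&node, NULL, 0, addr,
                          MPOL_F_NODE | MPOL_F_ADDR) < 0)
            return -1;
        return node;
    }
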
/dpdk/drivers/net/bnxt/
bnxt_ring.c
453 bp->eth_dev->device->numa_node); in bnxt_alloc_rxtx_nq_ring()
460 bp->eth_dev->device->numa_node); in bnxt_alloc_rxtx_nq_ring()
475 rc = bnxt_alloc_rings(bp, bp->eth_dev->device->numa_node, 0, NULL, in bnxt_alloc_rxtx_nq_ring()
729 unsigned int soc_id = bp->eth_dev->device->numa_node; in bnxt_alloc_hwrm_rings()
837 bp->eth_dev->device->numa_node); in bnxt_alloc_async_ring_struct()
844 bp->eth_dev->device->numa_node); in bnxt_alloc_async_ring_struct()
861 return bnxt_alloc_rings(bp, bp->eth_dev->device->numa_node, 0, NULL, in bnxt_alloc_async_ring_struct()
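
Finally, the bnxt ring code passes the PCI device's numa_node into its allocators so DMA rings end up on the NIC's local socket. A hedged sketch of the kind of memzone reservation behind those calls (zone name and length are illustrative):

    #include <rte_memzone.h>

    /* Sketch: reserve an IOVA-contiguous, cache-line-aligned zone on the
     * device's local NUMA node. */
    static const struct rte_memzone *
    alloc_ring_mz(const char *name, size_t len, int numa_node)
    {
        return rte_memzone_reserve_aligned(name, len, numa_node,
                                           RTE_MEMZONE_IOVA_CONTIG,
                                           RTE_CACHE_LINE_SIZE);
    }
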
