| /dpdk/app/test-compress-perf/ |
| comp_perf_test_common.c |
|  95  rte_free(mem->comp_bufs);  in comp_perf_free_memory()
|  131  socket_id, mem->dev_id, mem->qp_id, number);  in comp_perf_make_memzone()
|  194  mem->comp_memzones[i] = comp_perf_make_memzone("comp", mem,  in comp_perf_allocate_external_mbufs()
|  202  mem->decomp_memzones[i] = comp_perf_make_memzone("decomp", mem,  in comp_perf_allocate_external_mbufs()
|  252  mem->dev_id, mem->qp_id);  in comp_perf_allocate_memory()
|  264  mem->dev_id, mem->qp_id);  in comp_perf_allocate_memory()
|  279  mem->dev_id, mem->qp_id);  in comp_perf_allocate_memory()
|  283  mem->total_bufs * 2,  in comp_perf_allocate_memory()
|  357  mem->decomp_bufs[i] =  in prepare_bufs()
|  434  mem->comp_bufs[i] =  in prepare_bufs()
|  [all …]
|
| comp_perf_test_cyclecount.c |
|  48  ctx->ver.mem.qp_id = qp_id;  in cperf_cyclecount_test_constructor()
|  69  struct cperf_mem_resources *mem = &ctx->ver.mem;  in cperf_cyclecount_op_setup() local
|  90  mem->op_pool,  in cperf_cyclecount_op_setup()
|  154  struct cperf_mem_resources *mem = &ctx->ver.mem;  in main_loop() local
|  276  mem->op_pool,  in main_loop()
|  317  mem->qp_id, ops,  in main_loop()
|  345  mem->qp_id,  in main_loop()
|  396  mem->qp_id,  in main_loop()
|  458  mem->lcore_id);  in main_loop()
|  505  ctx->ver.mem.dev_id,  in cperf_cyclecount_test_runner()
|  [all …]
|
| comp_perf_test_throughput.c |
|  35  ctx->ver.mem.dev_id = dev_id;  in cperf_throughput_test_constructor()
|  36  ctx->ver.mem.qp_id = qp_id;  in cperf_throughput_test_constructor()
|  52  struct cperf_mem_resources *mem = &ctx->ver.mem;  in main_loop() local
|  152  mem->op_pool,  in main_loop()
|  192  mem->qp_id, ops,  in main_loop()
|  209  mem->qp_id,  in main_loop()
|  253  mem->qp_id,  in main_loop()
|  320  mem->lcore_id);  in main_loop()
|  352  ctx->ver.mem.dev_id,  in cperf_throughput_test_runner()
|  354  ctx->ver.mem.qp_id);  in cperf_throughput_test_runner()
|  [all …]
|
| comp_perf_test_verify.c |
|  35  ctx->mem.dev_id = dev_id;  in cperf_verify_test_constructor()
|  36  ctx->mem.qp_id = qp_id;  in cperf_verify_test_constructor()
|  53  struct cperf_mem_resources *mem = &ctx->mem;  in main_loop() local
|  55  uint8_t dev_id = mem->dev_id;  in main_loop()
|  157  mem->op_pool,  in main_loop()
|  197  mem->qp_id, ops,  in main_loop()
|  214  mem->qp_id,  in main_loop()
|  289  mem->qp_id,  in main_loop()
|  378  mem->lcore_id);  in main_loop()
|  394  ctx->mem.lcore_id = lcore;  in cperf_verify_test_runner()
|  [all …]
|
| comp_perf_test_common.h |
|  42  struct cperf_mem_resources *mem);
|  46  struct cperf_mem_resources *mem);
|  49  prepare_bufs(struct comp_test_data *test_data, struct cperf_mem_resources *mem);
|
| /dpdk/drivers/common/iavf/ |
| iavf_impl.c |
|  25  if (!mem)  in iavf_allocate_dma_mem_d()
|  36  mem->size = size;  in iavf_allocate_dma_mem_d()
|  48  if (!mem)  in iavf_free_dma_mem_d()
|  52  mem->zone = NULL;  in iavf_free_dma_mem_d()
|  53  mem->va = NULL;  in iavf_free_dma_mem_d()
|  54  mem->pa = (u64)0;  in iavf_free_dma_mem_d()
|  64  if (!mem)  in iavf_allocate_virt_mem_d()
|  67  mem->size = size;  in iavf_allocate_virt_mem_d()
|  70  if (mem->va)  in iavf_allocate_virt_mem_d()
|  80  if (!mem)  in iavf_free_virt_mem_d()
|  [all …]
|
| iavf_alloc.h |
|  25  struct iavf_dma_mem *mem,
|  29  struct iavf_dma_mem *mem);
|  31  struct iavf_virt_mem *mem,
|  34  struct iavf_virt_mem *mem);
|
| /dpdk/drivers/mempool/cnxk/ |
| cn10k_mempool_ops.c |
|  115  struct batch_op_mem *mem = &op_data->mem[i];  in batch_op_fini() local
|  119  mem->objs, mem->objs, BATCH_ALLOC_SZ);  in batch_op_fini()
|  124  mem->sz, 1);  in batch_op_fini()
|  171  struct batch_op_mem *mem = &op_data->mem[i];  in cn10k_mempool_get_count() local
|  178  count += mem->sz;  in cn10k_mempool_get_count()
|  197  mem = &op_data->mem[tid];  in cn10k_mempool_deq()
|  215  mem->objs, mem->objs, BATCH_ALLOC_SZ);  in cn10k_mempool_deq()
|  226  cur_sz = mem->sz;  in cn10k_mempool_deq()
|  229  memcpy(&obj_table[count], &mem->objs[mem->sz - cur_sz],  in cn10k_mempool_deq()
|  231  mem->sz -= cur_sz;  in cn10k_mempool_deq()
|  [all …]
|
| /dpdk/drivers/vdpa/mlx5/ |
| mlx5_vdpa_mem.c |
|  75  struct rte_vhost_memory *mem;  in mlx5_vdpa_vhost_mem_regions_prepare() local
|  87  qsort(mem->regions, mem->nregions, sizeof(mem->regions[0]),  in mlx5_vdpa_vhost_mem_regions_prepare()
|  89  *mem_size = (mem->regions[(mem->nregions - 1)].guest_phys_addr) +  in mlx5_vdpa_vhost_mem_regions_prepare()
|  90  (mem->regions[(mem->nregions - 1)].size) -  in mlx5_vdpa_vhost_mem_regions_prepare()
|  97  mem->regions[i].guest_phys_addr, mem->regions[i].size);  in mlx5_vdpa_vhost_mem_regions_prepare()
|  144  free(mem);  in mlx5_vdpa_vhost_mem_regions_prepare()
|  147  return mem;  in mlx5_vdpa_vhost_mem_regions_prepare()
|  207  if (!mem)  in mlx5_vdpa_mem_register()
|  212  free(mem);  in mlx5_vdpa_mem_register()
|  217  priv->vmem = mem;  in mlx5_vdpa_mem_register()
|  [all …]
|
| /dpdk/lib/eal/include/ |
| rte_trace_point.h |
|  293  uint8_t mem[];  member
|  323  void *mem = RTE_PTR_ADD(&trace->mem[0], offset);  in __rte_trace_mem_get() local
|  327  return mem;  in __rte_trace_mem_get()
|  342  *(uint64_t *)mem = val;  in __rte_trace_point_emit_ev_header()
|  347  void *mem; \
|  352  mem = __rte_trace_mem_get(val); \
|  353  if (unlikely(mem == NULL)) \
|  355  mem = __rte_trace_point_emit_ev_header(mem, val); \
|  365  memcpy(mem, &(in), sizeof(in)); \
|  366  mem = RTE_PTR_ADD(mem, sizeof(in)); \
|  [all …]
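The rte_trace_point.h hits above are the internals of the trace fast path: __rte_trace_mem_get() reserves space in the per-thread trace buffer, __rte_trace_point_emit_ev_header() writes the event header, and the emit macros memcpy each argument behind it. Applications stay on the public side of that machinery. Below is a minimal, hedged sketch of declaring, registering and firing an application trace point; the names app_trace_mem_event and app.mem.event, the two file comments, and the choice of emit helpers are illustrative assumptions, not code from this tree. The lib.eal.mem.* registrations listed further down under /dpdk/lib/eal/common/ follow the same pattern.

    /* app_trace.h -- hypothetical application tracepoint header */
    #include <rte_trace_point.h>

    RTE_TRACE_POINT(
        app_trace_mem_event,
        RTE_TRACE_POINT_ARGS(const void *ptr, size_t sz),
        rte_trace_point_emit_ptr(ptr);
        rte_trace_point_emit_size_t(sz);
    )

    /* app_trace.c -- rte_trace_point_register.h must be included before
     * the header that defines the tracepoints */
    #include <rte_trace_point_register.h>
    #include "app_trace.h"

    RTE_TRACE_POINT_REGISTER(app_trace_mem_event, app.mem.event)

    /* Fast path: the generated inline function writes the header and both
     * arguments into the per-thread trace memory seen in the hits above.
     * It records nothing unless tracing is enabled (e.g. EAL option
     * --trace=app.*). */
    static void
    record_alloc(const void *ptr, size_t sz)
    {
        app_trace_mem_event(ptr, sz);
    }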
|
| rte_bitmap.h |
|  170  rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)  in rte_bitmap_init() argument
|  181  if ((mem == NULL) || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK)) {  in rte_bitmap_init()
|  192  memset(mem, 0, size);  in rte_bitmap_init()
|  193  bmp = (struct rte_bitmap *) mem;  in rte_bitmap_init()
|  195  bmp->array1 = (uint64_t *) &mem[array1_byte_offset];  in rte_bitmap_init()
|  197  bmp->array2 = (uint64_t *) &mem[array2_byte_offset];  in rte_bitmap_init()
|  254  rte_bitmap_init_with_all_set(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)  in rte_bitmap_init_with_all_set() argument
|  262  if (!n_bits || !mem || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK))  in rte_bitmap_init_with_all_set()
|  272  bmp = (struct rte_bitmap *) mem;  in rte_bitmap_init_with_all_set()
|  273  bmp->array1 = (uint64_t *) &mem[array1_byte_offset];  in rte_bitmap_init_with_all_set()
|  [all …]
|
| /dpdk/app/test/ |
| test_bitmap.c |
|  183  void *mem;  in test_bitmap_all_clear() local
|  190  mem = rte_zmalloc("test_bmap", bmp_size, RTE_CACHE_LINE_SIZE);  in test_bitmap_all_clear()
|  191  if (mem == NULL) {  in test_bitmap_all_clear()
|  196  bmp = rte_bitmap_init(MAX_BITS, mem, bmp_size);  in test_bitmap_all_clear()
|  212  rte_free(mem);  in test_bitmap_all_clear()
|  220  void *mem;  in test_bitmap_all_set() local
|  230  mem = rte_zmalloc("test_bmap", bmp_size, RTE_CACHE_LINE_SIZE);  in test_bitmap_all_set()
|  231  if (mem == NULL) {  in test_bitmap_all_set()
|  236  bmp = rte_bitmap_init_with_all_set(MAX_BITS, mem, bmp_size);  in test_bitmap_all_set()
|  258  rte_free(mem);  in test_bitmap_all_set()
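test_bitmap.c exercises the rte_bitmap API declared in rte_bitmap.h just above: the caller supplies a cache-line aligned buffer sized with rte_bitmap_get_memory_footprint(), and rte_bitmap_init() lays the two-level bit arrays out inside it. A minimal sketch of that allocate/init/set/query cycle follows; it assumes rte_eal_init() has already run, and the heap tag "bmp_mem" plus the 1024-bit size are arbitrary.

    #include <stdio.h>
    #include <rte_bitmap.h>
    #include <rte_malloc.h>

    /* Create a 1024-bit bitmap backed by cache-line aligned memory from
     * the EAL heap, set one bit and read it back. */
    static int
    bitmap_sketch(void)
    {
        const uint32_t n_bits = 1024;
        uint32_t bmp_size = rte_bitmap_get_memory_footprint(n_bits);
        struct rte_bitmap *bmp;
        void *mem;

        mem = rte_zmalloc("bmp_mem", bmp_size, RTE_CACHE_LINE_SIZE);
        if (mem == NULL)
            return -1;

        bmp = rte_bitmap_init(n_bits, mem, bmp_size);
        if (bmp == NULL) {
            rte_free(mem);
            return -1;
        }

        rte_bitmap_set(bmp, 10);
        if (rte_bitmap_get(bmp, 10) == 0)
            printf("bit 10 unexpectedly clear\n");

        rte_bitmap_free(bmp);
        rte_free(mem);
        return 0;
    }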
|
| test_malloc.c |
|  632  void *mem;  in test_realloc() local
|  676  munmap(mem, mem_sz);  in test_realloc()
|  921  char *mem = NULL;  in test_alloc_single_socket() local
|  927  if (mem == NULL)  in test_alloc_single_socket()
|  930  rte_free(mem);  in test_alloc_single_socket()
|  933  rte_free(mem);  in test_alloc_single_socket()
|  937  if (mem == NULL)  in test_alloc_single_socket()
|  942  rte_free(mem);  in test_alloc_single_socket()
|  946  if (mem == NULL)  in test_alloc_single_socket()
|  949  rte_free(mem);  in test_alloc_single_socket()
|  [all …]
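test_malloc.c's test_alloc_single_socket() checks that the rte_malloc family either returns memory resident on the requested NUMA socket or NULL, and that rte_free() copes with every outcome. Here is a hedged sketch of the same per-socket allocation pattern using the public API; it assumes EAL is initialized, and the heap tag "example_buf" and the fall-back-to-any-socket policy are illustrative choices only.

    #include <rte_common.h>
    #include <rte_memory.h>
    #include <rte_malloc.h>
    #include <rte_lcore.h>

    /* Allocate a zeroed, cache-line aligned buffer on the caller's NUMA
     * socket, falling back to any socket if that heap is exhausted.
     * The caller releases the buffer with rte_free(). */
    static void *
    alloc_on_local_socket(size_t len)
    {
        void *mem;

        mem = rte_zmalloc_socket("example_buf", len,
                RTE_CACHE_LINE_SIZE, (int)rte_socket_id());
        if (mem == NULL)
            mem = rte_zmalloc_socket("example_buf", len,
                    RTE_CACHE_LINE_SIZE, SOCKET_ID_ANY);
        return mem;
    }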
|
| test_memory.c |
|  37  volatile uint8_t *mem = (volatile uint8_t *) ms->addr;  in check_mem() local
|  40  for (i = 0; i < max; i++, mem++)  in check_mem()
|  41  *mem;  in check_mem()
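check_mem() in test_memory.c reads every byte of a memory segment through a volatile pointer so the loads are not optimized away. Below is a hedged sketch of the same walk written against the public rte_memseg_walk() iterator rather than the test harness internals; it assumes EAL is initialized, and touch_memseg()/touch_all_memory() are hypothetical names.

    #include <stdio.h>
    #include <stdint.h>
    #include <rte_common.h>
    #include <rte_memory.h>

    /* Callback invoked once per EAL memory segment; reading each byte
     * through a volatile pointer forces the access. Returning non-zero
     * would stop the walk early. */
    static int
    touch_memseg(const struct rte_memseg *ms, void *arg __rte_unused)
    {
        volatile uint8_t *mem = (volatile uint8_t *)ms->addr;
        size_t i;

        for (i = 0; i < ms->len; i++)
            (void)mem[i];

        printf("touched segment %p, len %zu\n", ms->addr, ms->len);
        return 0;
    }

    static void
    touch_all_memory(void)
    {
        rte_memseg_walk(touch_memseg, NULL);
    }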
|
| /dpdk/drivers/net/ice/base/ |
| ice_osdep.h |
|  260  if (!mem)  in ice_alloc_dma_mem()
|  270  mem->size = size;  in ice_alloc_dma_mem()
|  271  mem->va = mz->addr;  in ice_alloc_dma_mem()
|  272  mem->pa = mz->iova;  in ice_alloc_dma_mem()
|  273  mem->zone = (const void *)mz;  in ice_alloc_dma_mem()
|  277  return mem->va;  in ice_alloc_dma_mem()
|  282  struct ice_dma_mem *mem)  in ice_free_dma_mem() argument
|  286  mem->pa);  in ice_free_dma_mem()
|  288  mem->zone = NULL;  in ice_free_dma_mem()
|  289  mem->va = NULL;  in ice_free_dma_mem()
|  [all …]
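ice_alloc_dma_mem()/ice_free_dma_mem() show the osdep pattern also used by the iavf and i40e base code above: a DMA-able buffer is backed by an rte_memzone, and the shim records the virtual address (mz->addr), the IOVA (mz->iova) and the zone handle needed to free it. Below is a hedged sketch of that pattern using only the public memzone API; struct dma_buf and both function names are hypothetical stand-ins for the driver-private *_dma_mem structs, not code from this tree.

    #include <stddef.h>
    #include <rte_memory.h>
    #include <rte_memzone.h>

    /* Mirror of the driver *_dma_mem layout: CPU address, device (IOVA)
     * address and the backing zone needed to release it later. */
    struct dma_buf {
        void *va;
        rte_iova_t pa;
        const struct rte_memzone *zone;
        size_t size;
    };

    /* Reserve an IOVA-contiguous, aligned memzone and record both
     * addresses. The zone name must be unique system-wide. */
    static int
    dma_buf_alloc(struct dma_buf *buf, const char *name, size_t size,
            unsigned int align)
    {
        const struct rte_memzone *mz;

        mz = rte_memzone_reserve_aligned(name, size, SOCKET_ID_ANY,
                RTE_MEMZONE_IOVA_CONTIG, align);
        if (mz == NULL)
            return -1;

        buf->va = mz->addr;
        buf->pa = mz->iova;
        buf->zone = mz;
        buf->size = size;
        return 0;
    }

    static void
    dma_buf_free(struct dma_buf *buf)
    {
        rte_memzone_free(buf->zone);
        buf->zone = NULL;
        buf->va = NULL;
        buf->pa = 0;
    }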
|
| /dpdk/drivers/net/i40e/base/ |
| i40e_alloc.h |
|  25  struct i40e_dma_mem *mem,
|  29  struct i40e_dma_mem *mem);
|  31  struct i40e_virt_mem *mem,
|  34  struct i40e_virt_mem *mem);
|
| i40e_dcb.c |
|  565  struct i40e_virt_mem mem;  in i40e_aq_get_dcb_config() local
|  573  lldpmib = (u8 *)mem.va;  in i40e_aq_get_dcb_config()
|  954  struct i40e_virt_mem mem;  in i40e_get_fw_lldp_status() local
|  965  lldpmib = (u8 *)mem.va;  in i40e_get_fw_lldp_status()
|  1250  struct i40e_virt_mem mem;  in i40e_set_dcb_config() local
|  1266  lldpmib = (u8 *)mem.va;  in i40e_set_dcb_config()
|  1328  u16 mem;  in _i40e_read_lldp_cfg() local
|  1340  mem = LE16_TO_CPU(raw_mem);  in _i40e_read_lldp_cfg()
|  1344  if (mem & I40E_PTR_TYPE)  in _i40e_read_lldp_cfg()
|  1359  mem = LE16_TO_CPU(raw_mem);  in _i40e_read_lldp_cfg()
|  [all …]
|
| i40e_hmc.c |
|  30  struct i40e_dma_mem mem;  in i40e_add_sd_table_entry() local
|  56  ret_code = i40e_allocate_dma_mem(hw, &mem, mem_type, alloc_len,  in i40e_add_sd_table_entry()
|  71  &mem, sizeof(struct i40e_dma_mem),  in i40e_add_sd_table_entry()
|  75  &mem, sizeof(struct i40e_dma_mem),  in i40e_add_sd_table_entry()
|  91  i40e_free_dma_mem(hw, &mem);  in i40e_add_sd_table_entry()
|  121  struct i40e_dma_mem mem;  in i40e_add_pd_table_entry() local
|  122  struct i40e_dma_mem *page = &mem;  in i40e_add_pd_table_entry()
|
| /dpdk/drivers/net/cnxk/ |
| cnxk_lookup.c |
|  229  nix_create_rx_ol_flags_array(void *mem)  in nix_create_rx_ol_flags_array() argument
|  235  ol_flags = (uint32_t *)((uint8_t *)mem + PTYPE_ARRAY_SZ);  in nix_create_rx_ol_flags_array()
|  303  void *mem;  in cnxk_nix_fastpath_lookup_mem_get() local
|  313  mem = mz->addr;  in cnxk_nix_fastpath_lookup_mem_get()
|  315  nix_create_non_tunnel_ptype_array(mem);  in cnxk_nix_fastpath_lookup_mem_get()
|  316  nix_create_tunnel_ptype_array(mem);  in cnxk_nix_fastpath_lookup_mem_get()
|  318  nix_create_rx_ol_flags_array(mem);  in cnxk_nix_fastpath_lookup_mem_get()
|  319  return mem;  in cnxk_nix_fastpath_lookup_mem_get()
|
| /dpdk/drivers/raw/cnxk_bphy/ |
| cnxk_bphy.c |
|  176  struct cnxk_bphy_mem *mem;  in cnxk_bphy_irq_enqueue_bufs() local
|  209  mem = rte_zmalloc(NULL, sizeof(*mem), 0);  in cnxk_bphy_irq_enqueue_bufs()
|  210  if (!mem)  in cnxk_bphy_irq_enqueue_bufs()
|  213  *mem = bphy_dev->mem;  in cnxk_bphy_irq_enqueue_bufs()
|  214  rsp = mem;  in cnxk_bphy_irq_enqueue_bufs()
|  342  bphy_dev->mem.res0 = pci_dev->mem_resource[0];  in bphy_rawdev_probe()
|  343  bphy_dev->mem.res2 = pci_dev->mem_resource[2];  in bphy_rawdev_probe()
|
| /dpdk/lib/acl/ |
| tb_mem.c |
|  42  block->mem = RTE_PTR_ALIGN_CEIL(ptr, pool->alignment);  in tb_pool()
|  43  block->size = size - (block->mem - ptr);  in tb_pool()
|  62  ptr = block->mem;  in tb_alloc()
|  64  block->mem += size;  in tb_alloc()
|
| /dpdk/examples/vhost/ |
| virtio_net.c |
|  37  ret = rte_vhost_get_mem_table(vid, &dev->mem);  in vs_vhost_net_setup()
|  57  free(dev->mem);  in vs_vhost_net_remove()
|  78  dev->mem, desc_gaddr, &desc_chunck_len);  in enqueue_pkt()
|  101  dst = rte_vhost_va_from_guest_pa(dev->mem,  in enqueue_pkt()
|  118  dev->mem, desc_gaddr,  in enqueue_pkt()
|  153  dev->mem, desc_gaddr, &desc_chunck_len);  in enqueue_pkt()
|  162  desc_addr = rte_vhost_va_from_guest_pa(dev->mem,  in enqueue_pkt()
|  264  dev->mem, desc_gaddr, &desc_chunck_len);  in dequeue_pkt()
|  281  dev->mem, desc_gaddr, &desc_chunck_len);  in dequeue_pkt()
|  318  dev->mem, desc_gaddr, &desc_chunck_len);  in dequeue_pkt()
|  [all …]
|
| /dpdk/lib/eal/common/ |
| eal_common_trace_points.c |
|  50  lib.eal.mem.zmalloc)
|  52  lib.eal.mem.malloc)
|  54  lib.eal.mem.realloc)
|  56  lib.eal.mem.free)
|
| /dpdk/doc/guides/sample_app_ug/ |
| vhost_blk.rst |
|  49  -m $mem -object memory-backend-file,id=mem,size=$mem,\
|  50  mem-path=/dev/hugepages,share=on -numa node,memdev=mem \
|
| /dpdk/lib/vhost/ |
| rte_vhost.h |
|  341  rte_vhost_gpa_to_vva(struct rte_vhost_memory *mem, uint64_t gpa)  in rte_vhost_gpa_to_vva() argument
|  346  for (i = 0; i < mem->nregions; i++) {  in rte_vhost_gpa_to_vva()
|  347  reg = &mem->regions[i];  in rte_vhost_gpa_to_vva()
|  375  rte_vhost_va_from_guest_pa(struct rte_vhost_memory *mem,  in rte_vhost_va_from_guest_pa() argument
|  381  for (i = 0; i < mem->nregions; i++) {  in rte_vhost_va_from_guest_pa()
|  382  r = &mem->regions[i];  in rte_vhost_va_from_guest_pa()
|  767  int rte_vhost_get_mem_table(int vid, struct rte_vhost_memory **mem);
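rte_vhost.h declares the translation helpers that examples/vhost/virtio_net.c calls on its datapath: rte_vhost_get_mem_table() returns a malloc()ed copy of the guest memory table, and rte_vhost_va_from_guest_pa() converts a guest physical address while clamping *len to the bytes that are contiguous in the host mapping. A hedged sketch of that sequence follows; guest_pa_to_host_va() is a hypothetical helper, and a real datapath would fetch the table once in the new_device callback and cache it rather than allocating and freeing it per translation.

    #include <stdint.h>
    #include <stdlib.h>
    #include <rte_vhost.h>

    /* Translate a guest physical address for vhost device 'vid'.
     * On input *chunk_len holds the number of bytes the caller wants;
     * on return it holds how many of them are contiguous in host VA. */
    static void *
    guest_pa_to_host_va(int vid, uint64_t gpa, uint64_t *chunk_len)
    {
        struct rte_vhost_memory *mem = NULL;
        uint64_t vva;

        if (rte_vhost_get_mem_table(vid, &mem) != 0)
            return NULL;

        vva = rte_vhost_va_from_guest_pa(mem, gpa, chunk_len);

        /* The table is a heap copy owned by the caller; the mapping it
         * describes stays valid after freeing it. */
        free(mem);
        return (void *)(uintptr_t)vva;    /* NULL/0 if gpa is unmapped */
    }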
|