
Searched refs:msl (Results 1 – 25 of 45) sorted by relevance

/f-stack/dpdk/lib/librte_eal/common/
eal_common_memory.c
202 msl->heap = heap; in eal_memseg_list_init_named()
250 msl->len = mem_sz; in eal_memseg_list_alloc()
290 if (msl == NULL) in virt2memseg()
302 ms_idx = RTE_PTR_DIFF(addr, msl->base_va) / msl->page_sz; in virt2memseg()
325 return msl; in virt2memseg_list()
389 return virt2memseg(addr, msl != NULL ? msl : in rte_mem_virt2memseg()
398 if (msl->external) in physmem_size()
401 *total_len += msl->memseg_arr.count * msl->page_sz; in physmem_size()
794 if (msl == NULL) { in rte_memseg_get_fd_thread_unsafe()
849 if (msl == NULL) { in rte_memseg_get_fd_offset_thread_unsafe()
[all …]
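
The virt2memseg() hits above locate a segment by subtracting msl->base_va from the address and dividing by msl->page_sz. As a hedged sketch (not taken from these sources, and assuming EAL is already initialized), the public wrappers around that logic are typically used like this:

#include <inttypes.h>
#include <stdio.h>
#include <rte_memory.h>

/* Resolve a virtual address to its owning memseg list and memseg,
 * mirroring what virt2memseg() does internally. Illustrative only. */
static void
print_owning_segment(const void *addr)
{
        const struct rte_memseg_list *msl = rte_mem_virt2memseg_list(addr);
        const struct rte_memseg *ms;

        if (msl == NULL)
                return; /* address is not managed by DPDK */
        /* passing msl avoids a second lookup; NULL would search all lists */
        ms = rte_mem_virt2memseg(addr, msl);
        if (ms == NULL)
                return;
        printf("%p -> seg va %p iova 0x%" PRIx64 " len %zu (page_sz %" PRIu64 ", socket %d)\n",
               addr, ms->addr, ms->iova, ms->len, msl->page_sz, msl->socket_id);
}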
malloc_heap.c
120 if (msl->external) in malloc_add_seg()
143 msl->socket_id); in malloc_add_seg()
845 msl = elem->msl; in malloc_heap_free()
1119 msl = tmp; in malloc_heap_create_external_seg()
1163 return msl; in malloc_heap_create_external_seg()
1178 if (msl->base_va == wa->va_addr && msl->len == wa->len) { in extseg_walk()
1206 return wa.msl; in malloc_heap_find_external_seg()
1217 memset(msl, 0, sizeof(*msl)); in malloc_heap_destroy_external_seg()
1227 memset(msl->base_va, 0, msl->len); in malloc_heap_add_external_memory()
1230 malloc_heap_add_memory(heap, msl, msl->base_va, msl->len); in malloc_heap_add_external_memory()
[all …]
rte_malloc.c
371 ms = rte_mem_virt2memseg(addr, elem->msl); in rte_malloc_virt2iova()
401 struct rte_memseg_list *msl; in rte_malloc_heap_memory_add() local
435 if (msl == NULL) { in rte_malloc_heap_memory_add()
442 msl->heap = 1; /* mark it as heap segment */ in rte_malloc_heap_memory_add()
455 struct rte_memseg_list *msl; in rte_malloc_heap_memory_remove() local
481 if (msl == NULL) { in rte_malloc_heap_memory_remove()
492 ret = malloc_heap_destroy_external_seg(msl); in rte_malloc_heap_memory_remove()
504 struct rte_memseg_list *msl; in sync_memory() local
532 if (msl == NULL) { in sync_memory()
538 ret = rte_fbarray_attach(&msl->memseg_arr); in sync_memory()
[all …]
malloc_elem.c
49 page_sz = (size_t)elem->msl->page_sz; in malloc_elem_find_max_iova_contig()
64 if (!elem->msl->external && in malloc_elem_find_max_iova_contig()
71 ms = rte_mem_virt2memseg(cur_page, elem->msl); in malloc_elem_find_max_iova_contig()
99 ms = rte_mem_virt2memseg(contig_seg_start, elem->msl); in malloc_elem_find_max_iova_contig()
131 struct rte_memseg_list *msl, size_t size, in malloc_elem_init() argument
135 elem->msl = msl; in malloc_elem_init()
212 elem_check_phys_contig(const struct rte_memseg_list *msl, in elem_check_phys_contig() argument
215 return eal_memalloc_is_contig(msl, start, size); in elem_check_phys_contig()
268 if (!elem_check_phys_contig(elem->msl, in elem_start_pt()
350 elem->next->msl == elem->msl && in next_elem_is_adjacent()
[all …]
eal_private.h
278 eal_memseg_list_init_named(struct rte_memseg_list *msl, const char *name,
291 eal_memseg_list_init(struct rte_memseg_list *msl, uint64_t page_sz,
306 eal_memseg_list_alloc(struct rte_memseg_list *msl, int reserve_flags);
319 eal_memseg_list_populate(struct rte_memseg_list *msl, void *addr, int n_segs);
eal_common_dynmem.c
28 struct rte_memseg_list *msl; in eal_dynmem_memseg_lists_init() local
186 msl = &mcfg->memsegs[msl_idx++]; in eal_dynmem_memseg_lists_init()
188 if (eal_memseg_list_init(msl, pagesz, n_segs, in eal_dynmem_memseg_lists_init()
192 if (eal_memseg_list_alloc(msl, 0)) { in eal_dynmem_memseg_lists_init()
206 hugepage_count_walk(const struct rte_memseg_list *msl, void *arg) in hugepage_count_walk() argument
210 if (msl->page_sz != hpi->hugepage_sz) in hugepage_count_walk()
213 hpi->num_pages[msl->socket_id] += msl->memseg_arr.len; in hugepage_count_walk()
eal_common_memalloc.c
71 eal_memalloc_is_contig(const struct rte_memseg_list *msl, void *start, in eal_memalloc_is_contig() argument
75 size_t pgsz = (size_t)msl->page_sz; in eal_memalloc_is_contig()
81 if (rte_eal_iova_mode() == RTE_IOVA_VA && !msl->external) in eal_memalloc_is_contig()
120 start_seg = RTE_PTR_DIFF(aligned_start, msl->base_va) / in eal_memalloc_is_contig()
122 end_seg = RTE_PTR_DIFF(aligned_end, msl->base_va) / in eal_memalloc_is_contig()
130 ms = rte_fbarray_get(&msl->memseg_arr, start_seg); in eal_memalloc_is_contig()
140 ms = rte_fbarray_get(&msl->memseg_arr, cur_seg); in eal_memalloc_is_contig()
eal_common_memzone.c
183 mz->hugepage_sz = elem->msl->page_sz; in memzone_reserve_aligned_thread_unsafe()
184 mz->socket_id = elem->msl->socket_id; in memzone_reserve_aligned_thread_unsafe()
318 struct rte_memseg_list *msl = NULL; in dump_memzone() local
337 msl = rte_mem_virt2memseg_list(mz->addr); in dump_memzone()
338 if (!msl) { in dump_memzone()
347 ms_idx = RTE_PTR_DIFF(mz->addr, msl->base_va) / page_sz; in dump_memzone()
348 ms = rte_fbarray_get(&msl->memseg_arr, ms_idx); in dump_memzone()
malloc_heap.h
74 malloc_heap_destroy_external_seg(struct rte_memseg_list *msl);
78 struct rte_memseg_list *msl);
/f-stack/dpdk/lib/librte_eal/freebsd/
eal_memory.c
68 struct rte_memseg_list *msl; in rte_eal_hugepage_init() local
73 msl = &mcfg->memsegs[0]; in rte_eal_hugepage_init()
92 msl->base_va = addr; in rte_eal_hugepage_init()
93 msl->len = mem_sz; in rte_eal_hugepage_init()
117 struct rte_memseg_list *msl; in rte_eal_hugepage_init() local
147 arr = &msl->memseg_arr; in rte_eal_hugepage_init()
149 if (msl->page_sz != page_sz) in rte_eal_hugepage_init()
180 arr = &msl->memseg_arr; in rte_eal_hugepage_init()
239 if (msl->external) in attach_segment()
338 struct rte_memseg_list *msl; in memseg_primary_init() local
[all …]
/f-stack/dpdk/lib/librte_eal/include/generic/
rte_mcslock.h
46 rte_mcslock_lock(rte_mcslock_t **msl, rte_mcslock_t *me) in rte_mcslock_lock() argument
60 prev = __atomic_exchange_n(msl, me, __ATOMIC_ACQ_REL); in rte_mcslock_lock()
99 rte_mcslock_unlock(rte_mcslock_t **msl, rte_mcslock_t *me) in rte_mcslock_unlock() argument
107 if (likely(__atomic_compare_exchange_n(msl, &save_me, NULL, 0, in rte_mcslock_unlock()
138 rte_mcslock_trylock(rte_mcslock_t **msl, rte_mcslock_t *me) in rte_mcslock_trylock() argument
152 return __atomic_compare_exchange_n(msl, &expected, me, 0, in rte_mcslock_trylock()
165 rte_mcslock_is_locked(rte_mcslock_t *msl) in rte_mcslock_is_locked() argument
167 return (__atomic_load_n(&msl, __ATOMIC_RELAXED) != NULL); in rte_mcslock_is_locked()
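
Note that the rte_mcslock.h hits are unrelated to memseg lists: here msl names the MCS queued lock itself. A minimal usage sketch based only on the prototypes above (illustrative, not code from the header):

#include <rte_mcslock.h>

static rte_mcslock_t *p_ml; /* the lock: tail pointer, NULL while unlocked */

static void
do_locked_work(void)
{
        rte_mcslock_t me; /* per-acquisition queue node; must stay valid until unlock */

        rte_mcslock_lock(&p_ml, &me);
        /* ... critical section ... */
        rte_mcslock_unlock(&p_ml, &me);
}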
/f-stack/dpdk/drivers/common/mlx5/
mlx5_common_mr.c
21 const struct rte_memseg_list *msl; member
270 if (mr->msl == NULL) { in mr_find_next_chunk()
289 msl = mr->msl; in mr_find_next_chunk()
511 data->msl = msl; in mr_find_contig_memsegs_cb()
593 const struct rte_memseg_list *msl; in mlx5_mr_create_primary() local
640 MLX5_ASSERT(data.msl); in mlx5_mr_create_primary()
643 msl = data.msl; in mlx5_mr_create_primary()
649 ms_n = len / msl->page_sz; in mlx5_mr_create_primary()
664 mr->msl = msl; in mlx5_mr_create_primary()
689 if (len > msl->page_sz && in mlx5_mr_create_primary()
[all …]
mlx5_malloc.c
68 mlx5_mem_check_msl(void *addr, struct rte_memseg_list *msl) in mlx5_mem_check_msl() argument
72 if (!msl) in mlx5_mem_check_msl()
74 start = msl->base_va; in mlx5_mem_check_msl()
75 end = RTE_PTR_ADD(start, msl->len); in mlx5_mem_check_msl()
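
mlx5_mem_check_msl() above is a simple containment test: an address belongs to the list if it falls inside [base_va, base_va + len). A hedged reconstruction (only the excerpted lines are verbatim; the return paths are assumptions):

#include <stdbool.h>
#include <rte_common.h>
#include <rte_memory.h>

/* Containment test as sketched by the excerpt; return paths assumed. */
static bool
mlx5_mem_check_msl(void *addr, struct rte_memseg_list *msl)
{
        void *start, *end;

        if (!msl)
                return false;
        start = msl->base_va;
        end = RTE_PTR_ADD(start, msl->len);
        return addr >= start && addr < end;
}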
/f-stack/dpdk/drivers/net/mlx4/
mlx4_mr.c
44 const struct rte_memseg_list *msl; member
293 if (mr->msl == NULL) { in mr_find_next_chunk()
314 msl = mr->msl; in mr_find_next_chunk()
525 data->msl = msl; in mr_find_contig_memsegs_cb()
596 const struct rte_memseg_list *msl; in mlx4_mr_create_primary() local
650 msl = data.msl; in mlx4_mr_create_primary()
655 ms_n = len / msl->page_sz; in mlx4_mr_create_primary()
674 mr->msl = msl; in mlx4_mr_create_primary()
700 if (len > msl->page_sz && in mlx4_mr_create_primary()
720 MLX4_ASSERT(data.msl == data_re.msl); in mlx4_mr_create_primary()
[all …]
/f-stack/dpdk/lib/librte_eal/linux/
eal_memory.c
668 struct rte_memseg_list *msl; local
691 arr = &msl->memseg_arr;
693 if (msl->page_sz != page_sz)
822 memset(msl, 0, sizeof(*msl));
960 if (msl->base_va != NULL)
1147 msl = &mcfg->memsegs[0];
1452 if (msl->base_va == NULL)
1458 mem_sz = msl->len;
1460 msl->base_va = NULL;
1461 msl->heap = 0;
[all …]
eal_memalloc.c
803 if (msl->page_sz != wa->page_sz) in alloc_seg_walk()
808 page_sz = (size_t)msl->page_sz; in alloc_seg_walk()
810 msl_idx = msl - mcfg->memsegs; in alloc_seg_walk()
944 msl_idx = msl - mcfg->memsegs; in free_seg_walk()
1369 if (msl->external) in sync_walk()
1372 msl_idx = msl - mcfg->memsegs; in sync_walk()
1420 if (msl->external) in secondary_msl_create_walk()
1423 msl_idx = msl - mcfg->memsegs; in secondary_msl_create_walk()
1484 if (msl->external) in fd_list_create_walk()
1487 msl_idx = msl - mcfg->memsegs; in fd_list_create_walk()
[all …]
eal_vfio.c
521 struct rte_memseg_list *msl; in vfio_mem_event_callback() local
526 msl = rte_mem_virt2memseg_list(addr); in vfio_mem_event_callback()
541 ms = rte_mem_virt2memseg(addr, msl); in vfio_mem_event_callback()
1318 if (msl->external) in type1_map_contig()
1332 if (msl->external && !msl->heap) in type1_map()
1340 if (!msl->external && rte_eal_iova_mode() == RTE_IOVA_VA) in type1_map()
1494 vfio_spapr_map_walk(const struct rte_memseg_list *msl, in vfio_spapr_map_walk() argument
1500 if (msl->external && !msl->heap) in vfio_spapr_map_walk()
1526 uint64_t max = (uint64_t) msl->base_va + (uint64_t) msl->len; in vfio_spapr_size_walk()
1528 if (msl->external && !msl->heap) { in vfio_spapr_size_walk()
[all …]
/f-stack/dpdk/drivers/net/mlx5/
mlx5_mr.c
23 const struct rte_memseg_list *msl; member
54 const struct rte_memseg_list *msl; in mlx5_mr_mem_event_free_cb() local
62 msl = rte_mem_virt2memseg_list(addr); in mlx5_mr_mem_event_free_cb()
65 RTE_ALIGN((uintptr_t)addr, msl->page_sz)); in mlx5_mr_mem_event_free_cb()
66 MLX5_ASSERT(len == RTE_ALIGN(len, msl->page_sz)); in mlx5_mr_mem_event_free_cb()
67 ms_n = len / msl->page_sz; in mlx5_mr_mem_event_free_cb()
78 start = (uintptr_t)addr + i * msl->page_sz; in mlx5_mr_mem_event_free_cb()
82 MLX5_ASSERT(mr->msl); /* Can't be external memory. */ in mlx5_mr_mem_event_free_cb()
83 ms = rte_mem_virt2memseg((void *)start, msl); in mlx5_mr_mem_event_free_cb()
85 MLX5_ASSERT(msl->page_sz == ms->hugepage_sz); in mlx5_mr_mem_event_free_cb()
[all …]
/f-stack/dpdk/lib/librte_eal/windows/
eal_memalloc.c
185 alloc_seg_walk(const struct rte_memseg_list *msl, void *arg) in alloc_seg_walk() argument
194 if (msl->page_sz != wa->page_sz) in alloc_seg_walk()
196 if (msl->socket_id != wa->socket) in alloc_seg_walk()
199 page_sz = (size_t)msl->page_sz; in alloc_seg_walk()
201 msl_idx = msl - mcfg->memsegs; in alloc_seg_walk()
286 free_seg_walk(const struct rte_memseg_list *msl, void *arg) in free_seg_walk() argument
294 start_addr = (uintptr_t) msl->base_va; in free_seg_walk()
295 end_addr = start_addr + msl->len; in free_seg_walk()
301 msl_idx = msl - mcfg->memsegs; in free_seg_walk()
302 seg_idx = RTE_PTR_DIFF(wa->ms->addr, start_addr) / msl->page_sz; in free_seg_walk()
eal_memory.c
653 struct rte_memseg_list *msl; in eal_nohuge_init() local
665 msl = &mcfg->memsegs[0]; in eal_nohuge_init()
672 msl, "nohugemem", page_sz, n_segs, 0, true)) { in eal_nohuge_init()
684 msl->base_va = addr; in eal_nohuge_init()
685 msl->len = mem_sz; in eal_nohuge_init()
687 eal_memseg_list_populate(msl, addr, n_segs); in eal_nohuge_init()
/f-stack/dpdk/app/test/
test_memory.c
28 check_mem(const struct rte_memseg_list *msl __rte_unused, in check_mem()
40 check_seg_fds(const struct rte_memseg_list *msl, const struct rte_memseg *ms, in check_seg_fds() argument
47 if (msl->external) in check_seg_fds()
/f-stack/dpdk/drivers/net/virtio/virtio_user/
vhost_vdpa.c
127 vhost_vdpa_map_contig(const struct rte_memseg_list *msl, in vhost_vdpa_map_contig() argument
132 if (msl->external) in vhost_vdpa_map_contig()
139 vhost_vdpa_map(const struct rte_memseg_list *msl, const struct rte_memseg *ms, in vhost_vdpa_map() argument
145 if (msl->external && !msl->heap) in vhost_vdpa_map()
153 if (!msl->external && rte_eal_iova_mode() == RTE_IOVA_VA) in vhost_vdpa_map()
vhost_kernel.c
73 add_memseg_list(const struct rte_memseg_list *msl, void *arg) in add_memseg_list() argument
80 if (msl->external) in add_memseg_list()
86 start_addr = msl->base_va; in add_memseg_list()
87 len = msl->page_sz * msl->memseg_arr.len; in add_memseg_list()
/f-stack/dpdk/lib/librte_eal/include/
rte_memory.h
147 rte_mem_virt2memseg(const void *virt, const struct rte_memseg_list *msl);
168 typedef int (*rte_memseg_walk_t)(const struct rte_memseg_list *msl,
180 typedef int (*rte_memseg_contig_walk_t)(const struct rte_memseg_list *msl,
191 typedef int (*rte_memseg_list_walk_t)(const struct rte_memseg_list *msl,
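
These typedefs are the callback types driven by rte_memseg_walk(), rte_memseg_contig_walk() and rte_memseg_list_walk(). A hypothetical callback, modeled on the physmem_size() and hugepage_count_walk() excerpts earlier in these results:

#include <stdint.h>
#include <rte_memory.h>

/* Sum the byte capacity of every internal memseg list; a non-zero
 * return value would stop the walk early. Illustrative only. */
static int
count_list_bytes(const struct rte_memseg_list *msl, void *arg)
{
        uint64_t *total = arg;

        if (msl->external) /* skip externally allocated memory */
                return 0;
        *total += msl->memseg_arr.count * msl->page_sz;
        return 0;
}

/* usage: uint64_t total = 0; rte_memseg_list_walk(count_list_bytes, &total); */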
/f-stack/dpdk/drivers/common/dpaax/
dpaax_iova_table.c
392 struct rte_memseg_list *msl; in dpaax_memevent_cb() local
401 msl = rte_mem_virt2memseg_list(addr); in dpaax_memevent_cb()
406 ms = rte_mem_virt2memseg(va, msl); in dpaax_memevent_cb()
438 dpaax_memevent_walk_memsegs(const struct rte_memseg_list *msl __rte_unused, in dpaax_memevent_walk_memsegs()
