/dpdk/lib/eal/windows/eal_memalloc.c

    46: if (ms->len > 0) {    in alloc_seg()
    57: ms->len, ms->socket_id);    in alloc_seg()
    153: if (eal_mem_decommit(ms->addr, ms->len)) {    in free_seg()
    158: ms->addr);    in free_seg()
    164: memset(ms, 0, sizeof(*ms));    in free_seg()
    255: if (wa->ms)    in alloc_seg_walk()
    256: memset(wa->ms, 0, sizeof(*wa->ms) * wa->n_segs);    in alloc_seg_walk()
    260: if (wa->ms)    in alloc_seg_walk()
    340: wa.ms = ms;    in eal_memalloc_alloc_seg_bulk()
    363: return ms;    in eal_memalloc_alloc_seg()
    [all …]

/dpdk/lib/ipsec/misc.h

    50: struct rte_mbuf *ms;    in mbuf_get_seg_ofs() local
    56: ms = rte_pktmbuf_lastseg(mb);    in mbuf_get_seg_ofs()
    59: ms = mb;    in mbuf_get_seg_ofs()
    62: ms = ms->next;    in mbuf_get_seg_ofs()
    68: return ms;    in mbuf_get_seg_ofs()
    90: slen = ms->data_len;    in mbuf_cut_seg_ofs()
    91: ms->data_len = ofs;    in mbuf_cut_seg_ofs()
    95: mn = ms->next;    in mbuf_cut_seg_ofs()
    96: ms->next = NULL;    in mbuf_cut_seg_ofs()
    98: ms = mn->next;    in mbuf_cut_seg_ofs()
    [all …]

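The misc.h hits show the standard mbuf chain walk: pick a starting segment (the head, or rte_pktmbuf_lastseg() for the tail) and follow ->next until the requested byte offset falls inside the current segment. Below is a minimal sketch of that pattern; the helper name and signature are illustrative, not the ipsec library's own API.

#include <rte_mbuf.h>

/*
 * Return the segment of "mb" that contains byte offset "ofs", and the
 * offset within that segment. Sketch only; returns NULL if ofs lies
 * past the end of the chain.
 */
static inline struct rte_mbuf *
seg_for_offset(struct rte_mbuf *mb, uint32_t ofs, uint32_t *seg_ofs)
{
    struct rte_mbuf *ms = mb;

    /* walk the ->next chain until ofs lands inside the current segment */
    while (ms != NULL && ofs >= ms->data_len) {
        ofs -= ms->data_len;
        ms = ms->next;
    }
    if (ms != NULL)
        *seg_ofs = ofs;
    return ms;
}
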
/dpdk/lib/eal/common/eal_common_memory.c

    352: if (vi->iova >= ms->iova && vi->iova < (ms->iova + ms->len)) {    in find_virt()
    365: if (vi->iova >= ms->iova && vi->iova < (ms->iova + len)) {    in find_virt_legacy()
    448: ms->iova,    in dump_memseg()
    449: ms->len,    in dump_memseg()
    450: ms->addr,    in dump_memseg()
    451: ms->socket_id,    in dump_memseg()
    453: ms->nchannel,    in dump_memseg()
    454: ms->nrank,    in dump_memseg()
    542: iova = (ms->iova + ms->len) - 1;    in check_iova()
    547: ms->iova, ms->len);    in check_iova()
    [all …]

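find_virt(), dump_memseg() and check_iova() above all run as rte_memseg_walk() callbacks over the fields of struct rte_memseg. A hedged sketch of a similar callback that just prints the fields these hits reference (assumes rte_eal_init() has already been called):

#include <inttypes.h>
#include <stdio.h>
#include <rte_common.h>
#include <rte_memory.h>

/* Print one segment per line; returning non-zero would stop the walk. */
static int
print_memseg(const struct rte_memseg_list *msl __rte_unused,
             const struct rte_memseg *ms, void *arg __rte_unused)
{
    printf("seg: addr=%p iova=0x%" PRIx64 " len=0x%zx socket=%" PRId32 "\n",
           ms->addr, ms->iova, ms->len, ms->socket_id);
    return 0;
}

/* usage: rte_memseg_walk(print_memseg, NULL); */
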
/dpdk/lib/eal/common/malloc_heap.c

    322: map_addr = ms[0]->addr;    in alloc_pages_on_heap()
    399: struct rte_memseg **ms;    in try_expand_heap_primary() local
    410: ms = malloc(sizeof(*ms) * n_segs);    in try_expand_heap_primary()
    411: if (ms == NULL)    in try_expand_heap_primary()
    413: memset(ms, 0, sizeof(*ms) * n_segs);    in try_expand_heap_primary()
    421: map_addr = ms[0]->addr;    in try_expand_heap_primary()
    446: free(ms);    in try_expand_heap_primary()
    459: free(ms);    in try_expand_heap_primary()
    847: struct rte_memseg *ms;    in malloc_heap_free_pages() local
    1211: struct rte_memseg *ms;    in malloc_heap_create_external_seg() local
    [all …]

/dpdk/lib/eal/common/malloc_mp.c

    46: struct rte_memseg **ms;    member
    222: struct rte_memseg **ms;    in handle_alloc_request() local
    257: ms = malloc(sizeof(*ms) * n_segs);    in handle_alloc_request()
    258: if (ms == NULL) {    in handle_alloc_request()
    262: memset(ms, 0, sizeof(*ms) * n_segs);    in handle_alloc_request()
    271: map_addr = ms[0]->addr;    in handle_alloc_request()
    280: req->alloc_state.ms = ms;    in handle_alloc_request()
    289: free(ms);    in handle_alloc_request()
    486: free(entry->alloc_state.ms);    in handle_sync_response()
    532: free(state->ms);    in handle_sync_response()
    [all …]

/dpdk/lib/eal/common/eal_memalloc.h

    28: eal_memalloc_alloc_seg_bulk(struct rte_memseg **ms, int n_segs, size_t page_sz,
    35: eal_memalloc_free_seg(struct rte_memseg *ms);
    43: eal_memalloc_free_seg_bulk(struct rte_memseg **ms, int n_segs);

/dpdk/lib/eal/common/malloc_elem.c

    38: struct rte_memseg *ms;    in malloc_elem_find_max_iova_contig() local
    65: ms = rte_mem_virt2memseg(cur_page, elem->msl);    in malloc_elem_find_max_iova_contig()
    73: expected_iova = ms->iova + page_sz;    in malloc_elem_find_max_iova_contig()
    75: ms++;    in malloc_elem_find_max_iova_contig()
    84: if (ms->iova != expected_iova) {    in malloc_elem_find_max_iova_contig()
    93: ms = rte_mem_virt2memseg(contig_seg_start, elem->msl);    in malloc_elem_find_max_iova_contig()
    94: cur_page = ms->addr;    in malloc_elem_find_max_iova_contig()
    96: expected_iova = ms->iova;    in malloc_elem_find_max_iova_contig()
    112: expected_iova = ms->iova + page_sz;    in malloc_elem_find_max_iova_contig()
    114: ms++;    in malloc_elem_find_max_iova_contig()

/dpdk/lib/eal/common/eal_common_memalloc.c

    73: const struct rte_memseg *ms;    in eal_memalloc_is_contig() local
    127: ms = rte_fbarray_get(&msl->memseg_arr, start_seg);    in eal_memalloc_is_contig()
    128: cur = ms->iova;    in eal_memalloc_is_contig()
    137: ms = rte_fbarray_get(&msl->memseg_arr, cur_seg);    in eal_memalloc_is_contig()
    139: if (ms->iova != expected)    in eal_memalloc_is_contig()

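malloc_elem_find_max_iova_contig() and eal_memalloc_is_contig() both implement the same check: step through a virtual range and verify that each segment's IOVA follows directly from the previous one. A simplified sketch of that idea using the public rte_mem_virt2memseg() API (the real implementations iterate the memseg fbarray directly; the helper name here is made up):

#include <stdbool.h>
#include <rte_common.h>
#include <rte_memory.h>

/* True if [start, start + len) is physically (IOVA) contiguous. */
static bool
range_is_iova_contig(const void *start, size_t len)
{
    const void *end = RTE_PTR_ADD(start, len);
    const void *va = start;
    rte_iova_t expected = 0;
    bool first = true;

    while (va < end) {
        const struct rte_memseg *ms = rte_mem_virt2memseg(va, NULL);

        if (ms == NULL)
            return false;    /* range not backed by DPDK memory */
        if (!first && ms->iova != expected)
            return false;    /* IOVA gap between segments */
        first = false;
        expected = ms->iova + ms->len;
        va = RTE_PTR_ADD(ms->addr, ms->len);
    }
    return true;
}
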
/dpdk/lib/eal/common/malloc_mp.h

    81: bool contig, struct rte_memseg **ms, int n_segs);
    84: rollback_expand_heap(struct rte_memseg **ms, int n_segs,

/dpdk/drivers/bus/dpaa/rte_dpaa_bus.h

    171: struct dpaa_memseg *ms;    in rte_dpaa_mem_ptov() local
    181: TAILQ_FOREACH(ms, &rte_dpaa_memsegs, next) {    in rte_dpaa_mem_ptov()
    182: if (paddr >= ms->iova && paddr <    in rte_dpaa_mem_ptov()
    183: ms->iova + ms->len)    in rte_dpaa_mem_ptov()
    184: return RTE_PTR_ADD(ms->vaddr, (uintptr_t)(paddr - ms->iova));    in rte_dpaa_mem_ptov()
    198: const struct rte_memseg *ms;    in rte_dpaa_mem_vtop() local
    200: ms = rte_mem_virt2memseg(vaddr, NULL);    in rte_dpaa_mem_vtop()
    201: if (ms)    in rte_dpaa_mem_vtop()
    202: return ms->iova + RTE_PTR_DIFF(vaddr, ms->addr);    in rte_dpaa_mem_vtop()

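rte_dpaa_mem_vtop() above is the generic virtual-to-IOVA recipe: look up the memseg backing the address and add the offset into it. The same pattern as a stand-alone sketch (returning RTE_BAD_IOVA on failure is this sketch's choice):

#include <rte_common.h>
#include <rte_memory.h>

static inline rte_iova_t
virt_to_iova(const void *vaddr)
{
    const struct rte_memseg *ms = rte_mem_virt2memseg(vaddr, NULL);

    if (ms == NULL)
        return RTE_BAD_IOVA;    /* not DPDK-managed memory */
    return ms->iova + RTE_PTR_DIFF(vaddr, ms->addr);
}
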
/dpdk/lib/eal/linux/eal_memalloc.c

    746: memset(ms->addr, 0, ms->len);    in free_seg()
    748: if (mmap(ms->addr, ms->len, PROT_NONE,    in free_seg()
    755: eal_mem_set_dump(ms->addr, ms->len, false);    in free_seg()
    759: memset(ms, 0, sizeof(*ms));    in free_seg()
    799: memset(ms, 0, sizeof(*ms));    in free_seg()
    922: if (wa->ms)    in alloc_seg_walk()
    923: memset(wa->ms, 0, sizeof(*wa->ms) * wa->n_segs);    in alloc_seg_walk()
    929: if (wa->ms)    in alloc_seg_walk()
    1055: wa.ms = ms;    in eal_memalloc_alloc_seg_bulk()
    1085: return ms;    in eal_memalloc_alloc_seg()
    [all …]

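The free_seg() hits show the Linux EAL dropping the hugepage backing of a segment while keeping its virtual range reserved: clear the memory, then map anonymous PROT_NONE pages over the same address. A rough sketch of that idea with plain mmap(2); the flags below are the common idiom, not necessarily the exact set the EAL uses:

#include <string.h>
#include <sys/mman.h>

static int
release_but_keep_va(void *addr, size_t len)
{
    memset(addr, 0, len);    /* scrub before giving the page back */

    /* re-reserve the VA range with no backing and no access rights */
    if (mmap(addr, len, PROT_NONE,
             MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0) == MAP_FAILED)
        return -1;
    return 0;
}
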
/dpdk/lib/eal/linux/eal_vfio.c

    610: struct rte_memseg *ms;    in vfio_mem_event_callback() local
    641: ms->addr);    in vfio_mem_event_callback()
    646: ms->iova, ms->len, 1);    in vfio_mem_event_callback()
    649: ms->iova, ms->len, 0);    in vfio_mem_event_callback()
    651: cur_len += ms->len;    in vfio_mem_event_callback()
    652: ++ms;    in vfio_mem_event_callback()
    1381: return vfio_type1_dma_mem_map(*vfio_container_fd, ms->addr_64, ms->iova,    in type1_map_contig()
    1396: if (ms->iova == RTE_BAD_IOVA)    in type1_map()
    1403: return vfio_type1_dma_mem_map(*vfio_container_fd, ms->addr_64, ms->iova,    in type1_map()
    1404: ms->len, 1);    in type1_map()
    [all …]

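vfio_mem_event_callback() reacts to memory allocation and free events by walking the affected memsegs and creating or removing the matching DMA mappings. A simplified sketch of the same shape using only public APIs; the EAL's container bookkeeping and error handling are omitted, so this is an illustration, not the EAL code:

#include <stdint.h>
#include <rte_common.h>
#include <rte_memory.h>
#include <rte_vfio.h>

static void
dma_mem_event(enum rte_mem_event type, const void *addr, size_t len,
              void *arg __rte_unused)
{
    const void *end = RTE_PTR_ADD(addr, len);
    const void *va = addr;

    while (va < end) {
        const struct rte_memseg *ms = rte_mem_virt2memseg(va, NULL);

        if (ms == NULL)
            break;
        if (type == RTE_MEM_EVENT_ALLOC)
            rte_vfio_container_dma_map(RTE_VFIO_DEFAULT_CONTAINER_FD,
                                       (uint64_t)(uintptr_t)ms->addr,
                                       ms->iova, ms->len);
        else
            rte_vfio_container_dma_unmap(RTE_VFIO_DEFAULT_CONTAINER_FD,
                                         (uint64_t)(uintptr_t)ms->addr,
                                         ms->iova, ms->len);
        va = RTE_PTR_ADD(ms->addr, ms->len);
    }
}

/* usage: rte_mem_event_callback_register("sketch-dma-map", dma_mem_event, NULL); */
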
/dpdk/app/test/test_memory.c

    35: const struct rte_memseg *ms, void *arg __rte_unused)    in check_mem() argument
    37: volatile uint8_t *mem = (volatile uint8_t *) ms->addr;    in check_mem()
    38: size_t i, max = ms->len;    in check_mem()
    46: check_seg_fds(const struct rte_memseg_list *msl, const struct rte_memseg *ms,    in check_seg_fds() argument
    57: ret = rte_memseg_get_fd_thread_unsafe(ms);    in check_seg_fds()
    69: ret = rte_memseg_get_fd_offset_thread_unsafe(ms, &offset);    in check_seg_fds()

/dpdk/app/test/test_ring_stress_impl.h

    80: lcore_op_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)    in lcore_op_stat_aggr() argument
    83: ms->op.nb_call += ls->op.nb_call;    in lcore_op_stat_aggr()
    84: ms->op.nb_obj += ls->op.nb_obj;    in lcore_op_stat_aggr()
    85: ms->op.nb_cycle += ls->op.nb_cycle;    in lcore_op_stat_aggr()
    86: ms->op.max_cycle = RTE_MAX(ms->op.max_cycle, ls->op.max_cycle);    in lcore_op_stat_aggr()
    87: ms->op.min_cycle = RTE_MIN(ms->op.min_cycle, ls->op.min_cycle);    in lcore_op_stat_aggr()
    91: lcore_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)    in lcore_stat_aggr() argument
    93: ms->nb_cycle = RTE_MAX(ms->nb_cycle, ls->nb_cycle);    in lcore_stat_aggr()
    94: lcore_op_stat_aggr(ms, ls);    in lcore_stat_aggr()

/dpdk/lib/eal/include/rte_memory.h

    170: const struct rte_memseg *ms, void *arg);
    182: const struct rte_memseg *ms, size_t len, void *arg);
    334: rte_memseg_get_fd(const struct rte_memseg *ms);
    358: rte_memseg_get_fd_thread_unsafe(const struct rte_memseg *ms);
    382: rte_memseg_get_fd_offset(const struct rte_memseg *ms, size_t *offset);
    406: rte_memseg_get_fd_offset_thread_unsafe(const struct rte_memseg *ms,

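rte_memory.h declares both walk-callback types (per segment, and per contiguous chunk with an extra len argument) and the per-segment fd accessors that test_memory.c exercises above. A small sketch of querying the backing fd and offset of each segment; the _thread_unsafe variants are used because the walk itself already holds the memory hotplug lock, mirroring check_seg_fds() above. Segments are only fd-backed on some configurations (e.g. hugetlbfs or memfd), so failures are simply skipped:

#include <stdio.h>
#include <rte_common.h>
#include <rte_memory.h>

static int
report_seg_fd(const struct rte_memseg_list *msl __rte_unused,
              const struct rte_memseg *ms, void *arg __rte_unused)
{
    size_t offset;
    int fd = rte_memseg_get_fd_thread_unsafe(ms);

    if (fd < 0)
        return 0;    /* not fd-backed; keep walking */
    if (rte_memseg_get_fd_offset_thread_unsafe(ms, &offset) < 0)
        return 0;
    printf("seg %p: fd=%d offset=0x%zx\n", ms->addr, fd, offset);
    return 0;
}

/* usage: rte_memseg_walk(report_seg_fd, NULL); */
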
/dpdk/drivers/common/dpaax/dpaax_iova_table.c

    395: struct rte_memseg *ms;    in dpaax_memevent_cb() local
    408: ms = rte_mem_virt2memseg(va, msl);    in dpaax_memevent_cb()
    409: phys_addr = rte_mem_virt2phy(ms->addr);    in dpaax_memevent_cb()
    410: virt_addr = ms->addr;    in dpaax_memevent_cb()
    411: map_len = ms->len;    in dpaax_memevent_cb()
    441: const struct rte_memseg *ms, size_t len,    in dpaax_memevent_walk_memsegs() argument
    445: ms->addr, ms->iova, len);    in dpaax_memevent_walk_memsegs()
    446: dpaax_iova_table_update(rte_mem_virt2phy(ms->addr), ms->addr, len);    in dpaax_memevent_walk_memsegs()

/dpdk/drivers/crypto/caam_jr/caam_jr_pvt.h

    248: const struct rte_memseg *ms;    in caam_jr_mem_vtop() local
    250: ms = rte_mem_virt2memseg(vaddr, NULL);    in caam_jr_mem_vtop()
    251: if (ms)    in caam_jr_mem_vtop()
    252: return ms->iova + RTE_PTR_DIFF(vaddr, ms->addr);    in caam_jr_mem_vtop()

/dpdk/drivers/mempool/dpaa/dpaa_mempool.c

    321: struct dpaa_memseg *ms;    in dpaa_populate() local
    327: ms = rte_zmalloc(NULL, sizeof(struct dpaa_memseg), 0);    in dpaa_populate()
    328: if (!ms) {    in dpaa_populate()
    339: ms->vaddr = vaddr;    in dpaa_populate()
    340: ms->iova = paddr;    in dpaa_populate()
    341: ms->len = len;    in dpaa_populate()
    345: TAILQ_INSERT_HEAD(&rte_dpaa_memsegs, ms, next);    in dpaa_populate()

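dpaa_populate() records each populated chunk (vaddr, iova, len) on a global TAILQ so that rte_dpaa_mem_ptov() (see rte_dpaa_bus.h above) can translate physical addresses back to virtual without going through the EAL. A generic sketch of that bookkeeping with made-up type and list names:

#include <sys/queue.h>
#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_memory.h>

/* Illustrative range-tracking entry; the fields mirror the hits above. */
struct va_range {
    TAILQ_ENTRY(va_range) next;
    void *vaddr;
    rte_iova_t iova;
    size_t len;
};

TAILQ_HEAD(va_range_list, va_range);
static struct va_range_list ranges = TAILQ_HEAD_INITIALIZER(ranges);

/* Remember a newly populated chunk (e.g. from a mempool populate hook). */
static int
track_range(void *vaddr, rte_iova_t iova, size_t len)
{
    struct va_range *r = rte_zmalloc(NULL, sizeof(*r), 0);

    if (r == NULL)
        return -1;
    r->vaddr = vaddr;
    r->iova = iova;
    r->len = len;
    TAILQ_INSERT_HEAD(&ranges, r, next);
    return 0;
}

/* IOVA-to-virtual lookup over the tracked ranges. */
static void *
ranges_iova_to_virt(rte_iova_t iova)
{
    struct va_range *r;

    TAILQ_FOREACH(r, &ranges, next)
        if (iova >= r->iova && iova < r->iova + r->len)
            return RTE_PTR_ADD(r->vaddr, (uintptr_t)(iova - r->iova));
    return NULL;
}
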
/dpdk/lib/eal/freebsd/eal_memalloc.c

    14: eal_memalloc_alloc_seg_bulk(struct rte_memseg **ms __rte_unused,    in eal_memalloc_alloc_seg_bulk()
    30: eal_memalloc_free_seg(struct rte_memseg *ms __rte_unused)    in eal_memalloc_free_seg()
    37: eal_memalloc_free_seg_bulk(struct rte_memseg **ms __rte_unused,    in eal_memalloc_free_seg_bulk()

/dpdk/drivers/net/mlx4/mlx4_mr.c

    312: const struct rte_memseg *ms;    in mr_find_next_chunk() local
    315: ms = rte_fbarray_get(&msl->memseg_arr,    in mr_find_next_chunk()
    319: start = ms->addr_64;    in mr_find_next_chunk()
    320: end = ms->addr_64 + ms->hugepage_sz;    in mr_find_next_chunk()
    519: if (data->addr < ms->addr_64 || data->addr >= ms->addr_64 + len)    in mr_find_contig_memsegs_cb()
    522: data->start = ms->addr_64;    in mr_find_contig_memsegs_cb()
    523: data->end = ms->addr_64 + len;    in mr_find_contig_memsegs_cb()
    596: const struct rte_memseg *ms;    in mlx4_mr_create_primary() local
    652: MLX4_ASSERT(msl->page_sz == ms->hugepage_sz);    in mlx4_mr_create_primary()
    913: const struct rte_memseg *ms;    in mlx4_mr_mem_event_free_cb() local
    [all …]

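mr_find_contig_memsegs_cb() above (and its mlx5 counterpart below) is a rte_memseg_contig_walk() callback: given an address, it reports the boundaries of the VA-contiguous run of memsegs containing it, so the whole chunk can be registered as one memory region. A sketch of that lookup with illustrative struct and function names:

#include <stdint.h>
#include <rte_common.h>
#include <rte_memory.h>

struct contig_lookup {
    uintptr_t addr;          /* input: address to locate */
    uintptr_t start, end;    /* output: contiguous chunk boundaries */
};

static int
find_contig_cb(const struct rte_memseg_list *msl __rte_unused,
               const struct rte_memseg *ms, size_t len, void *arg)
{
    struct contig_lookup *data = arg;

    if (data->addr < ms->addr_64 || data->addr >= ms->addr_64 + len)
        return 0;    /* not this chunk, keep walking */
    data->start = ms->addr_64;
    data->end = ms->addr_64 + len;
    return 1;        /* found, stop the walk */
}

/*
 * usage:
 *     struct contig_lookup data = { .addr = (uintptr_t)buf };
 *     if (rte_memseg_contig_walk(find_contig_cb, &data) > 0)
 *         the chunk spans [data.start, data.end)
 */
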
/dpdk/drivers/bus/fslmc/fslmc_vfio.c

    211: struct rte_memseg *ms;    in fslmc_memevent_cb() local
    222: ms = rte_mem_virt2memseg(va, msl);    in fslmc_memevent_cb()
    223: iova_addr = ms->iova;    in fslmc_memevent_cb()
    224: virt_addr = ms->addr_64;    in fslmc_memevent_cb()
    225: map_len = ms->len;    in fslmc_memevent_cb()
    348: const struct rte_memseg *ms, void *arg)    in fslmc_dmamap_seg() argument
    354: if (ms->iova == RTE_BAD_IOVA)    in fslmc_dmamap_seg()
    357: ret = fslmc_map_dma(ms->addr_64, ms->iova, ms->len);    in fslmc_dmamap_seg()
    360: ms->addr, ms->len);    in fslmc_dmamap_seg()

/dpdk/drivers/common/mlx5/mlx5_common_mr.c

    360: const struct rte_memseg *ms;    in mr_find_next_chunk() local
    363: ms = rte_fbarray_get(&msl->memseg_arr,    in mr_find_next_chunk()
    367: start = ms->addr_64;    in mr_find_next_chunk()
    368: end = ms->addr_64 + ms->hugepage_sz;    in mr_find_next_chunk()
    578: if (data->addr < ms->addr_64 || data->addr >= ms->addr_64 + len)    in mr_find_contig_memsegs_cb()
    581: data->start = ms->addr_64;    in mr_find_contig_memsegs_cb()
    582: data->end = ms->addr_64 + len;    in mr_find_contig_memsegs_cb()
    661: const struct rte_memseg *ms;    in mlx5_mr_create_primary() local
    713: MLX5_ASSERT(ms);    in mlx5_mr_create_primary()
    1185: const struct rte_memseg *ms;    in mlx5_free_mr_by_addr() local
    [all …]

/dpdk/drivers/net/virtio/virtio_user/vhost_vdpa.c

    317: const struct rte_memseg *ms, size_t len, void *arg)    in vhost_vdpa_map_contig() argument
    324: return vhost_vdpa_dma_map(dev, ms->addr, ms->iova, len);    in vhost_vdpa_map_contig()
    328: vhost_vdpa_map(const struct rte_memseg_list *msl, const struct rte_memseg *ms,    in vhost_vdpa_map() argument
    338: if (ms->iova == RTE_BAD_IOVA)    in vhost_vdpa_map()
    345: return vhost_vdpa_dma_map(dev, ms->addr, ms->iova, ms->len);    in vhost_vdpa_map()

/dpdk/drivers/net/cxgbe/base/t4vf_hw.c

    91: int ms;    in t4vf_wr_mbox_core() local
    122: ms = delay[0];    in t4vf_wr_mbox_core()
    124: for (i = 0; ; i += ms) {    in t4vf_wr_mbox_core()
    149: ms = delay[delay_idx];  /* last element may repeat */    in t4vf_wr_mbox_core()
    152: msleep(ms);    in t4vf_wr_mbox_core()
    154: rte_delay_ms(ms);    in t4vf_wr_mbox_core()
    185: ms = delay[0];    in t4vf_wr_mbox_core()
    192: ms = delay[delay_idx];  /* last element may repeat */    in t4vf_wr_mbox_core()
    195: msleep(ms);    in t4vf_wr_mbox_core()
    197: rte_delay_ms(ms);    in t4vf_wr_mbox_core()

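t4vf_wr_mbox_core() polls the mailbox with an increasing delay taken from a small table whose last element repeats, sleeping via msleep() or rte_delay_ms() between attempts. A generic sketch of that backoff loop; the predicate, the table values and the timeout handling are placeholders, not the cxgbe driver's own:

#include <stdbool.h>
#include <rte_common.h>
#include <rte_cycles.h>

/* Wait up to timeout_ms for ready() to become true, with table-driven backoff. */
static int
poll_with_backoff(bool (*ready)(void), unsigned int timeout_ms)
{
    static const unsigned int delay[] = { 1, 1, 3, 5, 10, 10, 20, 50 };
    unsigned int delay_idx = 0;
    unsigned int waited = 0;

    while (waited < timeout_ms) {
        if (ready())
            return 0;

        unsigned int ms = delay[delay_idx];

        if (delay_idx < RTE_DIM(delay) - 1)
            delay_idx++;    /* last element may repeat */
        rte_delay_ms(ms);
        waited += ms;
    }
    return -1;    /* timed out */
}
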
/dpdk/lib/eal/include/generic/rte_cycles.h

    148: rte_delay_ms(unsigned ms)    in rte_delay_ms() argument
    150: rte_delay_us(ms * 1000);    in rte_delay_ms()