Searched refs:rdma_entry (Results 1 – 21 of 21) sorted by relevance
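
The 21 hits all follow one pattern: each driver (mlx5, hns, efa, qedr, siw,
erdma, irdma, bnxt_re) embeds struct rdma_user_mmap_entry inside a
driver-private mmap-entry struct and recovers the outer struct with
container_of(). A minimal sketch of that pattern, with a hypothetical driver
"foo" standing in for any of them (the address and mmap_flag fields are
illustrative, not taken from any listed driver):

#include <linux/container_of.h>
#include <linux/types.h>
#include <rdma/ib_verbs.h>

/* Driver-private wrapper around the core's mmap entry. */
struct foo_user_mmap_entry {
	struct rdma_user_mmap_entry rdma_entry;
	u64 address;	/* driver payload, e.g. the BAR page to expose */
	u8 mmap_flag;
};

/* Recover the wrapper, as to_mmmap()/to_hns_mmap()/to_emmap() do below. */
static inline struct foo_user_mmap_entry *
to_foo_mmap(struct rdma_user_mmap_entry *rdma_entry)
{
	return container_of(rdma_entry, struct foo_user_mmap_entry,
			    rdma_entry);
}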

/linux-6.15/drivers/infiniband/hw/mlx5/
dm.c
158 context, &mentry->rdma_entry, size, in add_dm_mmap_entry()
180 page_idx = op_entry->mentry.rdma_entry.start_pgoff & 0xFFFF; in copy_op_to_user()
267 rdma_user_mmap_entry_remove(&op_entry->mentry.rdma_entry); in UVERBS_HANDLER()
317 page_idx = dm->mentry.rdma_entry.start_pgoff & 0xFFFF; in handle_alloc_dm_memic()
333 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in handle_alloc_dm_memic()
459 rdma_user_mmap_entry_remove(&entry->mentry.rdma_entry); in dm_memic_remove_ops()
467 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in mlx5_dm_memic_dealloc()
520 page_idx = memic->mentry.rdma_entry.start_pgoff & 0xFFFF; in UVERBS_HANDLER()
mlx5_ib.h
638 struct rdma_user_mmap_entry rdma_entry; member
1325 to_mmmap(struct rdma_user_mmap_entry *rdma_entry) in to_mmmap() argument
1327 return container_of(rdma_entry, in to_mmmap()
1328 struct mlx5_user_mmap_entry, rdma_entry); in to_mmmap()
main.c
2400 rdma_user_mmap_entry_put(&mentry->rdma_entry); in mlx5_ib_mmap_offset()
2406 u64 cmd = (entry->rdma_entry.start_pgoff >> 16) & 0xFFFF; in mlx5_entry_to_mmap_offset()
2407 u64 index = entry->rdma_entry.start_pgoff & 0xFFFF; in mlx5_entry_to_mmap_offset()
3733 rdma_user_mmap_entry_remove(&obj->rdma_entry); in mmap_obj_cleanup()
3742 &c->ibucontext, &entry->rdma_entry, length, in mlx5_rdma_user_mmap_entry_insert()
3813 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
3940 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
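
The main.c and dm.c hits above suggest how mlx5 multiplexes its mappings: the
low 16 bits of rdma_entry.start_pgoff hold an object index and the next 16
bits a command code. A sketch of that decode, using the masks visible in the
hits but with helper names that are illustrative, not the driver's own:

static u64 mmap_pgoff_cmd(const struct rdma_user_mmap_entry *entry)
{
	/* bits 16..31 of start_pgoff select the mmap command */
	return (entry->start_pgoff >> 16) & 0xFFFF;
}

static u64 mmap_pgoff_index(const struct rdma_user_mmap_entry *entry)
{
	/* bits 0..15 select the object within that command */
	return entry->start_pgoff & 0xFFFF;
}
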
/linux-6.15/drivers/infiniband/hw/hns/
hns_roce_main.c
324 ucontext, &entry->rdma_entry, length, 0); in hns_roce_user_mmap_entry_insert()
328 ucontext, &entry->rdma_entry, length, 1, in hns_roce_user_mmap_entry_insert()
348 &context->db_mmap_entry->rdma_entry); in hns_roce_dealloc_uar_entry()
463 struct rdma_user_mmap_entry *rdma_entry; in hns_roce_mmap() local
474 rdma_entry = rdma_user_mmap_entry_get_pgoff(uctx, vma->vm_pgoff); in hns_roce_mmap()
475 if (!rdma_entry) { in hns_roce_mmap()
480 entry = to_hns_mmap(rdma_entry); in hns_roce_mmap()
494 prot, rdma_entry); in hns_roce_mmap()
497 rdma_user_mmap_entry_put(rdma_entry); in hns_roce_mmap()
504 static void hns_roce_free_mmap(struct rdma_user_mmap_entry *rdma_entry) in hns_roce_free_mmap() argument
[all …]
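
The hns_roce_mmap() hits above, and the efa, qedr, siw, erdma and irdma
handlers further down, share the same .mmap flow: look up the entry for the
requested page offset, map it with rdma_user_mmap_io(), then drop the lookup
reference. A minimal sketch of that flow, reusing the hypothetical foo types
from the first sketch (the physical address field and the noncached
protection are assumptions, not taken from any of these drivers):

#include <linux/mm.h>
#include <linux/pfn.h>
#include <rdma/ib_verbs.h>

static int foo_mmap(struct ib_ucontext *uctx, struct vm_area_struct *vma)
{
	struct rdma_user_mmap_entry *rdma_entry;
	struct foo_user_mmap_entry *entry;
	pgprot_t prot;
	int ret;

	/* Look up the entry registered for this vm_pgoff; takes a reference. */
	rdma_entry = rdma_user_mmap_entry_get_pgoff(uctx, vma->vm_pgoff);
	if (!rdma_entry)
		return -EINVAL;

	entry = to_foo_mmap(rdma_entry);

	/* Map the backing pages; the core ties the entry to the vma. */
	prot = pgprot_noncached(vma->vm_page_prot);
	ret = rdma_user_mmap_io(uctx, vma, PFN_DOWN(entry->address),
				rdma_entry->npages * PAGE_SIZE, prot,
				rdma_entry);

	/* Drop the lookup reference; the core pins the entry while mapped. */
	rdma_user_mmap_entry_put(rdma_entry);
	return ret;
}

Most of these drivers call rdma_user_mmap_entry_get() instead of the raw
_get_pgoff() lookup; that wrapper additionally checks that the vma is
VM_SHARED and exactly entry->npages pages long.
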
hns_roce_device.h
208 struct rdma_user_mmap_entry rdma_entry; member
1082 to_hns_mmap(struct rdma_user_mmap_entry *rdma_entry) in to_hns_mmap() argument
1084 return container_of(rdma_entry, struct hns_user_mmap_entry, rdma_entry); in to_hns_mmap()
hns_roce_qp.c
387 rdma_user_mmap_entry_remove(&hr_qp->dwqe_mmap_entry->rdma_entry); in qp_user_mmap_entry_remove()
843 struct rdma_user_mmap_entry *rdma_entry; in qp_mmap_entry() local
858 rdma_entry = &hr_qp->dwqe_mmap_entry->rdma_entry; in qp_mmap_entry()
859 resp->dwqe_mmap_key = rdma_user_mmap_get_offset(rdma_entry); in qp_mmap_entry()
/linux-6.15/drivers/infiniband/hw/efa/
efa_verbs.c
30 struct rdma_user_mmap_entry rdma_entry; member
170 return container_of(rdma_entry, struct efa_user_mmap_entry, rdma_entry); in to_emmap()
539 return &entry->rdma_entry; in efa_user_mmap_entry_insert()
1983 struct rdma_user_mmap_entry *rdma_entry; in __efa_mmap() local
1990 if (!rdma_entry) { in __efa_mmap()
1997 entry = to_emmap(rdma_entry); in __efa_mmap()
2008 entry->rdma_entry.npages * PAGE_SIZE, in __efa_mmap()
2010 rdma_entry); in __efa_mmap()
2014 entry->rdma_entry.npages * PAGE_SIZE, in __efa_mmap()
2016 rdma_entry); in __efa_mmap()
[all …]
efa.h
179 void efa_mmap_free(struct rdma_user_mmap_entry *rdma_entry);
/linux-6.15/drivers/infiniband/hw/qedr/
qedr.h
499 struct rdma_user_mmap_entry rdma_entry; member
637 get_qedr_mmap_entry(struct rdma_user_mmap_entry *rdma_entry) in get_qedr_mmap_entry() argument
639 return container_of(rdma_entry, struct qedr_user_mmap_entry, in get_qedr_mmap_entry()
640 rdma_entry); in get_qedr_mmap_entry()
verbs.h
49 void qedr_mmap_free(struct rdma_user_mmap_entry *rdma_entry);
verbs.c
310 rc = rdma_user_mmap_entry_insert(uctx, &entry->rdma_entry, in qedr_alloc_ucontext()
316 ctx->db_mmap_entry = &entry->rdma_entry; in qedr_alloc_ucontext()
375 void qedr_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in qedr_mmap_free() argument
392 struct rdma_user_mmap_entry *rdma_entry; in qedr_mmap() local
401 rdma_entry = rdma_user_mmap_entry_get(ucontext, vma); in qedr_mmap()
402 if (!rdma_entry) { in qedr_mmap()
407 entry = get_qedr_mmap_entry(rdma_entry); in qedr_mmap()
417 rdma_entry); in qedr_mmap()
432 rdma_user_mmap_entry_put(rdma_entry); in qedr_mmap()
772 &entry->rdma_entry, in qedr_init_user_db_rec()
[all …]
/linux-6.15/drivers/infiniband/sw/siw/
siw_verbs.c
47 void siw_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in siw_mmap_free() argument
49 struct siw_user_mmap_entry *entry = to_siw_mmap_entry(rdma_entry); in siw_mmap_free()
58 struct rdma_user_mmap_entry *rdma_entry; in siw_mmap() local
69 rdma_entry = rdma_user_mmap_entry_get(&uctx->base_ucontext, vma); in siw_mmap()
70 if (!rdma_entry) { in siw_mmap()
75 entry = to_siw_mmap_entry(rdma_entry); in siw_mmap()
81 rdma_user_mmap_entry_put(rdma_entry); in siw_mmap()
287 &entry->rdma_entry, in siw_mmap_entry_insert()
294 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in siw_mmap_entry_insert()
296 return &entry->rdma_entry; in siw_mmap_entry_insert()
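
On the insert side, siw_mmap_entry_insert() above and
erdma_user_mmap_entry_insert() below show the registration half: hand the
entry to the core, then report its mmap offset back to userspace. A sketch of
that shape with the hypothetical foo types again (allocation and error
handling are assumptions):

#include <linux/slab.h>
#include <rdma/ib_verbs.h>

static struct rdma_user_mmap_entry *
foo_mmap_entry_insert(struct ib_ucontext *uctx, u64 address, size_t length,
		      u64 *offset)
{
	struct foo_user_mmap_entry *entry;
	int ret;

	entry = kzalloc(sizeof(*entry), GFP_KERNEL);
	if (!entry)
		return NULL;

	entry->address = address;

	/* The core assigns start_pgoff/npages and makes the entry findable. */
	ret = rdma_user_mmap_entry_insert(uctx, &entry->rdma_entry, length);
	if (ret) {
		kfree(entry);
		return NULL;
	}

	/* The offset userspace must pass to mmap() to hit this entry. */
	*offset = rdma_user_mmap_get_offset(&entry->rdma_entry);

	return &entry->rdma_entry;
}

The matching teardown is the driver's mmap_free hook (siw_mmap_free(),
erdma_mmap_free(), efa_mmap_free(), ...), which the core invokes once the
entry's last reference is dropped.
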
siw_verbs.h
84 void siw_mmap_free(struct rdma_user_mmap_entry *rdma_entry);
siw.h
486 struct rdma_user_mmap_entry rdma_entry; member
592 return container_of(rdma_mmap, struct siw_user_mmap_entry, rdma_entry); in to_siw_mmap_entry()
/linux-6.15/drivers/infiniband/hw/erdma/
erdma_verbs.h
29 struct rdma_user_mmap_entry rdma_entry; member
427 return container_of(ibmmap, struct erdma_user_mmap_entry, rdma_entry); in to_emmap()
459 void erdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry);
erdma_verbs.c
306 ret = rdma_user_mmap_entry_insert(&uctx->ibucontext, &entry->rdma_entry, in erdma_user_mmap_entry_insert()
313 *mmap_offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in erdma_user_mmap_entry_insert()
315 return &entry->rdma_entry; in erdma_user_mmap_entry_insert()
1376 struct rdma_user_mmap_entry *rdma_entry; in erdma_mmap() local
1381 rdma_entry = rdma_user_mmap_entry_get(ctx, vma); in erdma_mmap()
1382 if (!rdma_entry) in erdma_mmap()
1385 entry = to_emmap(rdma_entry); in erdma_mmap()
1398 prot, rdma_entry); in erdma_mmap()
1401 rdma_user_mmap_entry_put(rdma_entry); in erdma_mmap()
1405 void erdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in erdma_mmap_free() argument
[all …]
/linux-6.15/drivers/infiniband/hw/irdma/
main.h
374 to_irdma_mmap_entry(struct rdma_user_mmap_entry *rdma_entry) in to_irdma_mmap_entry() argument
376 return container_of(rdma_entry, struct irdma_user_mmap_entry, in to_irdma_mmap_entry()
377 rdma_entry); in to_irdma_mmap_entry()
verbs.h
226 struct rdma_user_mmap_entry rdma_entry; member
verbs.c
135 static void irdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in irdma_mmap_free() argument
156 &entry->rdma_entry, PAGE_SIZE); in irdma_user_mmap_entry_insert()
161 *mmap_offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in irdma_user_mmap_entry_insert()
163 return &entry->rdma_entry; in irdma_user_mmap_entry_insert()
173 struct rdma_user_mmap_entry *rdma_entry; in irdma_mmap() local
185 rdma_entry = rdma_user_mmap_entry_get(&ucontext->ibucontext, vma); in irdma_mmap()
186 if (!rdma_entry) { in irdma_mmap()
193 entry = to_irdma_mmap_entry(rdma_entry); in irdma_mmap()
205 rdma_entry); in irdma_mmap()
210 rdma_entry); in irdma_mmap()
[all …]
/linux-6.15/drivers/infiniband/hw/bnxt_re/
ib_verbs.h
161 struct rdma_user_mmap_entry rdma_entry; member
269 void bnxt_re_mmap_free(struct rdma_user_mmap_entry *rdma_entry);
ib_verbs.c
645 &entry->rdma_entry, PAGE_SIZE, 0); in bnxt_re_mmap_entry_insert()
653 &entry->rdma_entry, PAGE_SIZE); in bnxt_re_mmap_entry_insert()
737 pd->pd_db_mmap = &entry->rdma_entry; in bnxt_re_alloc_pd()
4331 uctx->shpage_mmap = &entry->rdma_entry; in bnxt_re_alloc_ucontext()
4429 if (!rdma_entry) in bnxt_re_mmap()
4433 rdma_entry); in bnxt_re_mmap()
4440 rdma_entry); in bnxt_re_mmap()
4446 rdma_entry); in bnxt_re_mmap()
4455 rdma_entry); in bnxt_re_mmap()
4471 rdma_user_mmap_entry_put(rdma_entry); in bnxt_re_mmap()
[all …]