
Searched refs:pgmap (Results 1 – 25 of 39) sorted by relevance


/linux-6.15/mm/
memremap.c
108 pfn_first(pgmap, range_id)) >> pgmap->vmemmap_shift; in pfn_len()
145 percpu_ref_put_many(&pgmap->ref, pfn_len(pgmap, i)); in memunmap_pages()
296 .pgmap = pgmap, in memremap_pages()
311 if (!pgmap->ops || !pgmap->ops->migrate_to_ram) { in memremap_pages()
413 pgmap); in devm_memremap_pages()
442 if (pgmap) { in get_dev_pagemap()
451 if (pgmap && !percpu_ref_tryget_live_rcu(&pgmap->ref)) in get_dev_pagemap()
455 return pgmap; in get_dev_pagemap()
461 struct dev_pagemap *pgmap = folio->pgmap; in free_zone_device_folio() local
500 if (WARN_ON_ONCE(!pgmap->ops || !pgmap->ops->page_free)) in free_zone_device_folio()
[all …]
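
The memremap.c hits cover both registration (memremap_pages()/memunmap_pages()) and the per-pfn lookup. As a hedged illustration of the lookup half, here is a minimal sketch of the get_dev_pagemap()/put_dev_pagemap() pattern; example_pfn_is_fsdax() is a hypothetical helper, not a function in the tree.

    /* Illustrative sketch, not code from the tree: example_pfn_is_fsdax() is a
     * hypothetical helper showing the get/put refcount pattern. */
    #include <linux/memremap.h>
    #include <linux/types.h>

    static bool example_pfn_is_fsdax(unsigned long pfn)
    {
            struct dev_pagemap *pgmap;
            bool ret = false;

            /* Takes a reference on pgmap->ref if pfn lies in a registered range. */
            pgmap = get_dev_pagemap(pfn, NULL);
            if (pgmap) {
                    ret = pgmap->type == MEMORY_DEVICE_FS_DAX;
                    put_dev_pagemap(pgmap);    /* drop the reference taken above */
            }
            return ret;
    }
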
sparse-vmemmap.c
480 struct dev_pagemap *pgmap) in reuse_compound_section() argument
482 unsigned long nr_pages = pgmap_vmemmap_nr(pgmap); in reuse_compound_section()
484 PHYS_PFN(pgmap->ranges[pgmap->nr_range].start); in reuse_compound_section()
509 struct dev_pagemap *pgmap) in vmemmap_populate_compound_pages() argument
515 if (reuse_compound_section(start_pfn, pgmap)) { in vmemmap_populate_compound_pages()
529 size = min(end - start, pgmap_vmemmap_nr(pgmap) * sizeof(struct page)); in vmemmap_populate_compound_pages()
563 struct dev_pagemap *pgmap) in __populate_section_memmap() argument
573 if (vmemmap_can_optimize(altmap, pgmap)) in __populate_section_memmap()
574 r = vmemmap_populate_compound_pages(pfn, start, end, nid, pgmap); in __populate_section_memmap()
gup.c
31 struct dev_pagemap *pgmap; member
697 ctx->pgmap = get_dev_pagemap(pfn, ctx->pgmap); in follow_huge_pud()
698 if (!ctx->pgmap) in follow_huge_pud()
879 *pgmap = get_dev_pagemap(pte_pfn(pte), *pgmap); in follow_page_pte()
880 if (*pgmap) in follow_page_pte()
1572 if (ctx.pgmap) in __get_user_pages()
2920 pgmap = get_dev_pagemap(pte_pfn(pte), pgmap); in gup_fast_pte_range()
2972 if (pgmap) in gup_fast_pte_range()
2973 put_dev_pagemap(pgmap); in gup_fast_pte_range()
3007 pgmap = get_dev_pagemap(pfn, pgmap); in gup_fast_devmap_leaf()
[all …]
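
The gup.c hits pass the previously returned pgmap back into get_dev_pagemap(), so consecutive pfns in the same device range skip the slow lookup. A hedged sketch of that caching idiom follows; example_walk_pfns() is hypothetical, and the single trailing put mirrors what gup_fast_pte_range() does.

    /* Illustrative sketch: example_walk_pfns() is hypothetical. */
    #include <linux/memremap.h>

    static void example_walk_pfns(unsigned long start_pfn, unsigned long nr)
    {
            struct dev_pagemap *pgmap = NULL;
            unsigned long pfn;

            for (pfn = start_pfn; pfn < start_pfn + nr; pfn++) {
                    /*
                     * If the cached pgmap still covers pfn it is returned as-is
                     * (the reference is already held); otherwise the old
                     * reference is dropped and a fresh lookup takes a new one.
                     */
                    pgmap = get_dev_pagemap(pfn, pgmap);
                    if (!pgmap)
                            continue;
                    /* ... operate on the ZONE_DEVICE page for pfn here ... */
            }
            if (pgmap)
                    put_dev_pagemap(pgmap);
    }
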
sparse.c
419 struct dev_pagemap *pgmap) in __populate_section_memmap() argument
672 struct dev_pagemap *pgmap) in populate_section_memmap() argument
674 return __populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap); in populate_section_memmap()
745 struct dev_pagemap *pgmap) in populate_section_memmap() argument
870 struct dev_pagemap *pgmap) in section_activate() argument
902 memmap = populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap); in section_activate()
932 struct dev_pagemap *pgmap) in sparse_add_section() argument
943 memmap = section_activate(nid, start_pfn, nr_pages, altmap, pgmap); in sparse_add_section()
mm_init.c
1006 struct dev_pagemap *pgmap) in __init_zone_device_page() argument
1025 page_folio(page)->pgmap = pgmap; in __init_zone_device_page()
1052 switch (pgmap->type) { in __init_zone_device_page()
1074 struct dev_pagemap *pgmap) in compound_nr_pages() argument
1076 if (!vmemmap_can_optimize(altmap, pgmap)) in compound_nr_pages()
1077 return pgmap_vmemmap_nr(pgmap); in compound_nr_pages()
1085 struct dev_pagemap *pgmap, in memmap_init_compound() argument
1089 unsigned int order = pgmap->vmemmap_shift; in memmap_init_compound()
1112 struct dev_pagemap *pgmap) in memmap_init_zone_device() argument
1116 struct vmem_altmap *altmap = pgmap_altmap(pgmap); in memmap_init_zone_device()
[all …]
memory-failure.c
1758 struct dev_pagemap *pgmap) in mf_generic_kill_procs() argument
1781 switch (pgmap->type) { in mf_generic_kill_procs()
2154 struct dev_pagemap *pgmap) in memory_failure_dev_pagemap() argument
2159 if (!pgmap_pfn_valid(pgmap, pfn)) in memory_failure_dev_pagemap()
2166 if (pgmap_has_memory_failure(pgmap)) { in memory_failure_dev_pagemap()
2167 rc = pgmap->ops->memory_failure(pgmap, pfn, 1, flags); in memory_failure_dev_pagemap()
2179 put_dev_pagemap(pgmap); in memory_failure_dev_pagemap()
2230 struct dev_pagemap *pgmap; in memory_failure() local
2251 pgmap = get_dev_pagemap(pfn, NULL); in memory_failure()
2253 if (pgmap) { in memory_failure()
[all …]
migrate_device.c
116 struct dev_pagemap *pgmap; in migrate_vma_collect_pmd() local
144 pgmap = page_pgmap(page); in migrate_vma_collect_pmd()
147 pgmap->owner != migrate->pgmap_owner) in migrate_vma_collect_pmd()
167 pgmap = page_pgmap(page); in migrate_vma_collect_pmd()
171 pgmap->owner != migrate->pgmap_owner) in migrate_vma_collect_pmd()
memory_hotplug.c
348 struct dev_pagemap *pgmap; in pfn_to_online_page() local
377 pgmap = get_dev_pagemap(pfn, NULL); in pfn_to_online_page()
378 put_dev_pagemap(pgmap); in pfn_to_online_page()
381 if (pgmap) in pfn_to_online_page()
423 params->pgmap); in __add_pages()
/linux-6.15/include/linux/
memremap.h
145 return pgmap->ops && pgmap->ops->memory_failure; in pgmap_has_memory_failure()
150 if (pgmap->flags & PGMAP_ALTMAP_VALID) in pgmap_altmap()
151 return &pgmap->altmap; in pgmap_altmap()
157 return 1 << pgmap->vmemmap_shift; in pgmap_vmemmap_nr()
204 void memunmap_pages(struct dev_pagemap *pgmap);
208 struct dev_pagemap *pgmap);
214 struct dev_pagemap *pgmap) in devm_memremap_pages() argument
226 struct dev_pagemap *pgmap) in devm_memunmap_pages() argument
231 struct dev_pagemap *pgmap) in get_dev_pagemap() argument
250 if (pgmap) in put_dev_pagemap()
[all …]
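
The memremap.h hits are small inline helpers. For instance, pgmap_vmemmap_nr() is just 1 << vmemmap_shift, so a pagemap created with vmemmap_shift == 9 describes 512-page compound folios (2 MiB with a 4 KiB base page). A trivial, hypothetical caller of two of these helpers:

    /* Illustrative sketch: example_report() is hypothetical. */
    #include <linux/memremap.h>
    #include <linux/printk.h>

    static void example_report(struct dev_pagemap *pgmap)
    {
            unsigned long nr = pgmap_vmemmap_nr(pgmap);       /* 1 << vmemmap_shift */
            struct vmem_altmap *altmap = pgmap_altmap(pgmap); /* NULL unless PGMAP_ALTMAP_VALID */

            pr_info("compound folios of %lu pages, altmap %s\n",
                    nr, altmap ? "present" : "absent");
    }
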
memory_hotplug.h
87 struct dev_pagemap *pgmap; member
323 struct dev_pagemap *pgmap);
/linux-6.15/drivers/xen/
unpopulated-alloc.c
36 struct dev_pagemap *pgmap; in fill_list() local
84 pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL); in fill_list()
85 if (!pgmap) { in fill_list()
90 pgmap->type = MEMORY_DEVICE_GENERIC; in fill_list()
91 pgmap->range = (struct range) { in fill_list()
95 pgmap->nr_range = 1; in fill_list()
96 pgmap->owner = res; in fill_list()
121 vaddr = memremap_pages(pgmap, NUMA_NO_NODE); in fill_list()
139 kfree(pgmap); in fill_list()
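
The Xen hits show about the smallest complete registration: allocate a dev_pagemap, describe one physical range, pick MEMORY_DEVICE_GENERIC (which needs no dev_pagemap_ops), and hand it to memremap_pages(). A hedged sketch of that shape, with placeholder names and trimmed error handling:

    /* Illustrative sketch: example_register_generic() is hypothetical; the
     * struct resource stands in for whatever describes the device range. */
    #include <linux/err.h>
    #include <linux/ioport.h>
    #include <linux/memremap.h>
    #include <linux/numa.h>
    #include <linux/slab.h>

    static void *example_register_generic(struct resource *res)
    {
            struct dev_pagemap *pgmap;
            void *vaddr;

            pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL);
            if (!pgmap)
                    return ERR_PTR(-ENOMEM);

            pgmap->type = MEMORY_DEVICE_GENERIC;    /* no ops needed for this type */
            pgmap->range = (struct range) {
                    .start = res->start,
                    .end   = res->end,
            };
            pgmap->nr_range = 1;
            pgmap->owner = res;

            /* Creates struct pages for the range and returns its kernel mapping. */
            vaddr = memremap_pages(pgmap, NUMA_NO_NODE);
            if (IS_ERR(vaddr))
                    kfree(pgmap);
            return vaddr;
    }
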
/linux-6.15/drivers/pci/
p2pdma.c
33 struct dev_pagemap pgmap; member
38 return container_of(pgmap, struct pci_p2pdma_pagemap, pgmap); in to_p2p_pgmap()
298 struct dev_pagemap *pgmap; in pci_p2pdma_add_resource() local
325 pgmap = &p2p_pgmap->pgmap; in pci_p2pdma_add_resource()
327 pgmap->range.end = pgmap->range.start + size - 1; in pci_p2pdma_add_resource()
328 pgmap->nr_range = 1; in pci_p2pdma_add_resource()
329 pgmap->type = MEMORY_DEVICE_PCI_P2PDMA; in pci_p2pdma_add_resource()
330 pgmap->ops = &p2pdma_pgmap_ops; in pci_p2pdma_add_resource()
351 &pgmap->ref); in pci_p2pdma_add_resource()
356 pgmap->range.start, pgmap->range.end); in pci_p2pdma_add_resource()
[all …]
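
p2pdma.c embeds the dev_pagemap inside a driver-private structure and recovers the outer structure with container_of(). A hedged sketch of that embedding pattern, using a made-up wrapper type:

    /* Illustrative sketch: struct example_pagemap is a made-up wrapper,
     * mirroring struct pci_p2pdma_pagemap above. */
    #include <linux/kernel.h>
    #include <linux/memremap.h>

    struct example_pagemap {
            struct dev_pagemap pgmap;
            int private_data;
    };

    static struct example_pagemap *to_example_pagemap(struct dev_pagemap *pgmap)
    {
            /* Valid because pgmap is embedded in (not pointed to by) the wrapper. */
            return container_of(pgmap, struct example_pagemap, pgmap);
    }
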
/linux-6.15/drivers/dax/
device.c
85 if (dev_dax->pgmap->vmemmap_shift) in dax_set_mapping()
399 struct dev_pagemap *pgmap; in dev_dax_probe() local
412 pgmap = dev_dax->pgmap; in dev_dax_probe()
414 if (dev_dax->pgmap) { in dev_dax_probe()
420 pgmap = devm_kzalloc(dev, in dev_dax_probe()
423 if (!pgmap) in dev_dax_probe()
426 pgmap->nr_range = dev_dax->nr_range; in dev_dax_probe()
427 dev_dax->pgmap = pgmap; in dev_dax_probe()
431 pgmap->ranges[i] = *range; in dev_dax_probe()
446 pgmap->type = MEMORY_DEVICE_GENERIC; in dev_dax_probe()
[all …]
pmem.c
19 struct dev_pagemap pgmap = { }; in __dax_pmem_probe() local
33 rc = nvdimm_setup_pfn(nd_pfn, &pgmap); in __dax_pmem_probe()
53 range = pgmap.range; in __dax_pmem_probe()
64 .pgmap = &pgmap, in __dax_pmem_probe()
bus.h
23 struct dev_pagemap *pgmap; member
dax-private.h
90 struct dev_pagemap *pgmap; member
bus.c
437 dev_dax->pgmap = NULL; in kill_dev_dax()
1411 kfree(dev_dax->pgmap); in dev_dax_release()
1463 if (data->pgmap) { in __devm_create_dev_dax()
1467 dev_dax->pgmap = kmemdup(data->pgmap, in __devm_create_dev_dax()
1469 if (!dev_dax->pgmap) { in __devm_create_dev_dax()
1525 kfree(dev_dax->pgmap); in __devm_create_dev_dax()
/linux-6.15/tools/testing/nvdimm/test/
iomap.c
99 struct dev_pagemap *pgmap = _pgmap; in nfit_test_kill() local
101 WARN_ON(!pgmap); in nfit_test_kill()
103 percpu_ref_kill(&pgmap->ref); in nfit_test_kill()
105 wait_for_completion(&pgmap->done); in nfit_test_kill()
106 percpu_ref_exit(&pgmap->ref); in nfit_test_kill()
113 complete(&pgmap->done); in dev_pagemap_percpu_release()
119 resource_size_t offset = pgmap->range.start; in __wrap_devm_memremap_pages()
123 return devm_memremap_pages(dev, pgmap); in __wrap_devm_memremap_pages()
125 init_completion(&pgmap->done); in __wrap_devm_memremap_pages()
126 error = percpu_ref_init(&pgmap->ref, dev_pagemap_percpu_release, 0, in __wrap_devm_memremap_pages()
[all …]
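
The nvdimm test shim manages pgmap->ref by hand, something memremap_pages() normally does internally: a release callback completes pgmap->done, and teardown is kill, wait, exit. A hedged reconstruction of that sequence from the hits above, with hypothetical names:

    /* Illustrative reconstruction with hypothetical names; a normal driver
     * lets memremap_pages() manage pgmap->ref instead. */
    #include <linux/completion.h>
    #include <linux/gfp.h>
    #include <linux/kernel.h>
    #include <linux/memremap.h>
    #include <linux/percpu-refcount.h>

    static void example_percpu_release(struct percpu_ref *ref)
    {
            struct dev_pagemap *pgmap = container_of(ref, struct dev_pagemap, ref);

            complete(&pgmap->done);
    }

    static int example_ref_setup(struct dev_pagemap *pgmap)
    {
            init_completion(&pgmap->done);
            return percpu_ref_init(&pgmap->ref, example_percpu_release, 0, GFP_KERNEL);
    }

    static void example_ref_teardown(struct dev_pagemap *pgmap)
    {
            percpu_ref_kill(&pgmap->ref);           /* no new references after this */
            wait_for_completion(&pgmap->done);      /* all outstanding refs dropped */
            percpu_ref_exit(&pgmap->ref);
    }
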
/linux-6.15/drivers/nvdimm/
pmem.c
438 container_of(pgmap, struct pmem_device, pgmap); in pmem_pagemap_memory_failure()
515 pmem->pgmap.owner = pmem; in pmem_attach_disk()
518 pmem->pgmap.type = MEMORY_DEVICE_FS_DAX; in pmem_attach_disk()
519 pmem->pgmap.ops = &fsdax_pagemap_ops; in pmem_attach_disk()
524 range_len(&pmem->pgmap.range); in pmem_attach_disk()
525 bb_range = pmem->pgmap.range; in pmem_attach_disk()
528 pmem->pgmap.range.start = res->start; in pmem_attach_disk()
529 pmem->pgmap.range.end = res->end; in pmem_attach_disk()
530 pmem->pgmap.nr_range = 1; in pmem_attach_disk()
532 pmem->pgmap.ops = &fsdax_pagemap_ops; in pmem_attach_disk()
[all …]
pfn_devs.c
683 static int __nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap) in __nvdimm_setup_pfn() argument
685 struct range *range = &pgmap->range; in __nvdimm_setup_pfn()
686 struct vmem_altmap *altmap = &pgmap->altmap; in __nvdimm_setup_pfn()
706 pgmap->nr_range = 1; in __nvdimm_setup_pfn()
721 pgmap->flags |= PGMAP_ALTMAP_VALID; in __nvdimm_setup_pfn()
856 int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap) in nvdimm_setup_pfn() argument
868 return __nvdimm_setup_pfn(nd_pfn, pgmap); in nvdimm_setup_pfn()
pmem.h
29 struct dev_pagemap pgmap; member
/linux-6.15/drivers/gpu/drm/amd/amdkfd/
kfd_migrate.c
1021 struct dev_pagemap *pgmap; in kgd2kfd_init_zone_device() local
1033 pgmap = &kfddev->pgmap; in kgd2kfd_init_zone_device()
1034 memset(pgmap, 0, sizeof(*pgmap)); in kgd2kfd_init_zone_device()
1043 pgmap->type = MEMORY_DEVICE_COHERENT; in kgd2kfd_init_zone_device()
1048 pgmap->range.start = res->start; in kgd2kfd_init_zone_device()
1049 pgmap->range.end = res->end; in kgd2kfd_init_zone_device()
1050 pgmap->type = MEMORY_DEVICE_PRIVATE; in kgd2kfd_init_zone_device()
1053 pgmap->nr_range = 1; in kgd2kfd_init_zone_device()
1054 pgmap->ops = &svm_migrate_pgmap_ops; in kgd2kfd_init_zone_device()
1056 pgmap->flags = 0; in kgd2kfd_init_zone_device()
[all …]
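
The amdkfd hits select MEMORY_DEVICE_COHERENT or MEMORY_DEVICE_PRIVATE and wire up svm_migrate_pgmap_ops; for MEMORY_DEVICE_PRIVATE, memremap_pages() insists on migrate_to_ram and page_free callbacks (the memremap.c hit at line 311 above is part of that check). A hedged sketch of the ops wiring with placeholder callbacks:

    /* Illustrative sketch: the example_* names are placeholders; a real driver
     * migrates and reclaims its own device pages in these callbacks. */
    #include <linux/ioport.h>
    #include <linux/memremap.h>
    #include <linux/mm.h>

    static vm_fault_t example_migrate_to_ram(struct vm_fault *vmf)
    {
            return VM_FAULT_SIGBUS;         /* placeholder: migrate the page back to RAM */
    }

    static void example_page_free(struct page *page)
    {
            /* placeholder: return the device page to the driver's allocator */
    }

    static const struct dev_pagemap_ops example_pgmap_ops = {
            .page_free      = example_page_free,
            .migrate_to_ram = example_migrate_to_ram,
    };

    static void example_init_private(struct dev_pagemap *pgmap, struct resource *res)
    {
            pgmap->type = MEMORY_DEVICE_PRIVATE;
            pgmap->range.start = res->start;
            pgmap->range.end = res->end;
            pgmap->nr_range = 1;
            pgmap->ops = &example_pgmap_ops;
    }
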
kfd_svm.h
204 #define KFD_IS_SVM_API_SUPPORTED(adev) ((adev)->kfd.pgmap.type != 0 ||\
/linux-6.15/arch/powerpc/include/asm/book3s/64/
radix.h
367 bool vmemmap_can_optimize(struct vmem_altmap *altmap, struct dev_pagemap *pgmap);
374 struct dev_pagemap *pgmap);
/linux-6.15/fs/fuse/
virtio_fs.c
1059 struct dev_pagemap *pgmap; in virtio_fs_setup_dax() local
1089 pgmap = devm_kzalloc(&vdev->dev, sizeof(*pgmap), GFP_KERNEL); in virtio_fs_setup_dax()
1090 if (!pgmap) in virtio_fs_setup_dax()
1093 pgmap->type = MEMORY_DEVICE_FS_DAX; in virtio_fs_setup_dax()
1100 pgmap->range = (struct range) { in virtio_fs_setup_dax()
1104 pgmap->nr_range = 1; in virtio_fs_setup_dax()
1106 fs->window_kaddr = devm_memremap_pages(&vdev->dev, pgmap); in virtio_fs_setup_dax()
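
virtio_fs (and the DAX drivers) use the device-managed variant: devm_kzalloc() for the dev_pagemap plus devm_memremap_pages(), so both the structure and the mapping are torn down automatically when the device is unbound. A hedged sketch with a hypothetical helper, using MEMORY_DEVICE_FS_DAX as in the hit above:

    /* Illustrative sketch: example_devm_map() is hypothetical. */
    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/gfp.h>
    #include <linux/memremap.h>

    static void *example_devm_map(struct device *dev, phys_addr_t start, size_t len)
    {
            struct dev_pagemap *pgmap;

            pgmap = devm_kzalloc(dev, sizeof(*pgmap), GFP_KERNEL);
            if (!pgmap)
                    return ERR_PTR(-ENOMEM);

            pgmap->type = MEMORY_DEVICE_FS_DAX;
            pgmap->range = (struct range) {
                    .start = start,
                    .end   = start + len - 1,
            };
            pgmap->nr_range = 1;

            /* Unwound automatically (memunmap + free) when the device is unbound. */
            return devm_memremap_pages(dev, pgmap);
    }
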
