Searched refs:folio_mapping (Results 1 – 22 of 22) sorted by relevance
2858 struct address_space *mapping = folio_mapping(folio); in folio_mark_dirty()
2917 struct address_space *mapping = folio_mapping(folio); in __folio_cancel_dirty()
2952 struct address_space *mapping = folio_mapping(folio); in folio_clear_dirty_for_io()
3037 struct address_space *mapping = folio_mapping(folio); in __folio_end_writeback()
3077 struct address_space *mapping = folio_mapping(folio); in __folio_start_writeback()
3148 trace_folio_wait_writeback(folio, folio_mapping(folio)); in folio_wait_writeback()
3170 trace_folio_wait_writeback(folio, folio_mapping(folio)); in folio_wait_writeback_killable()
3194 if (mapping_stable_writes(folio_mapping(folio))) in folio_wait_stable()
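Several of the writeback hits above share one pattern: look the mapping up once and consult per-mapping state, treating the folio as page-cache backed. As a rough reconstruction of the folio_wait_stable() hit at result line 3194 (a sketch, not the exact source):

    void folio_wait_stable(struct folio *folio)
    {
            /* Sketch: assumes the folio is in the page cache, so folio_mapping() is non-NULL. */
            if (mapping_stable_writes(folio_mapping(folio)))
                    folio_wait_writeback(folio);
    }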
238 mapping = folio_mapping(folio); in hwpoison_filter_dev()
1090 mapping = folio_mapping(folio); in me_pagecache_clean()
1126 struct address_space *mapping = folio_mapping(folio); in me_pagecache_dirty()
1212 mapping = folio_mapping(folio); in me_huge_page()
1580 mapping = folio_mapping(folio); in unmap_poisoned_folio()
2611 if (folio_mapping(folio)) { in unpoison_memory()
2709 ret = mapping_evict_folio(folio_mapping(folio), folio); in soft_offline_in_use_page()
690 struct address_space *folio_mapping(struct folio *folio) in folio_mapping() function
707 EXPORT_SYMBOL(folio_mapping);
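This is the definition the other hits resolve to: it returns a folio's owning address_space while screening out folios whose ->mapping field does not hold a real mapping pointer. A minimal kernel-style sketch of that logic (field and helper names track recent kernels and may differ across versions; not a verbatim copy of the source):

    struct address_space *folio_mapping(struct folio *folio)
    {
            struct address_space *mapping;

            /* Slab pages have no address_space (e.g. flush_dcache_page() on a slab page). */
            if (unlikely(folio_test_slab(folio)))
                    return NULL;

            /* Swap-cache folios report the swap address_space, not the file's mapping. */
            if (unlikely(folio_test_swapcache(folio)))
                    return swap_address_space(folio->swap);

            /* Anonymous/movable folios tag ->mapping with low bits; treat them as unmapped. */
            mapping = folio->mapping;
            if ((unsigned long)mapping & PAGE_MAPPING_FLAGS)
                    return NULL;

            return mapping;
    }

Callers below therefore treat a NULL return as "not backed by a regular page-cache mapping".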
363 if (folio_mapping(folio)) in migrate_vma_check_page()
751 mapping = folio_mapping(folio); in __migrate_device_pages()
73 struct address_space *mapping = folio_mapping(folio); in __dump_folio()
475 ret = !mapping_unevictable(folio_mapping(folio)) && in folio_evictable()
497 struct address_space *mapping = folio_mapping(folio); in folio_needs_release()
1128 mapping = folio_mapping(folio); in folio_mkclean()
2871 VM_WARN_ON_FOLIO(folio && mapping != folio_mapping(folio), folio); in __rmap_walk_file()
1038 struct address_space *mapping = folio_mapping(src); in move_to_new_folio()
1479 if (hugetlb_folio_subpool(src) && !folio_mapping(src)) { in unmap_and_move_huge_page()
509 if (folio_mapping(folio) == mapping) in handle_write_error()
742 BUG_ON(mapping != folio_mapping(folio)); in __remove_mapping()
1003 mapping = folio_mapping(folio); in folio_check_dirty_writeback()
1393 mapping = folio_mapping(folio); in shrink_folio_list()
1471 mapping = folio_mapping(folio); in shrink_folio_list()
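The reclaim hits (handle_write_error(), __remove_mapping()) show the re-validation idiom: a mapping looked up earlier can go stale once the folio is truncated, so it is rechecked with the folio locked. A hypothetical helper sketching that pattern (the function name and surrounding locking are illustrative, not taken from the source):

    /* Hypothetical example: report whether @folio still belongs to @mapping. */
    static bool folio_still_in_mapping(struct folio *folio,
                                       struct address_space *mapping)
    {
            bool ret;

            folio_lock(folio);
            /* Truncation clears ->mapping, so the comparison must happen under the lock. */
            ret = folio_mapping(folio) == mapping;
            folio_unlock(folio);

            return ret;
    }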
1119 mapping = folio_mapping(folio); in isolate_migratepages_block()
1177 mapping = folio_mapping(folio); in isolate_migratepages_block()
180 bool page_in_cache = folio_mapping(folio); in mfill_atomic_install_pte()
1988 if (folio_mapping(folio) != mapping) { in collapse_file()
1199 if (folio_mapping(folio) != mapping) { in shmem_undo_range()
1935 struct address_space *mapping = folio_mapping(folio); in hugetlb_folio_mapping_lock_write()
117 :functions: folio_mapping
297 mapping = folio_mapping(folio); in bh_get_inode_and_lblk_num()
535 struct address_space *folio_mapping(struct folio *);
555 return folio_mapping(folio); in folio_flush_mapping()
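The second hit here is the inline wrapper used by architecture cache-flushing code; roughly, it hides swap-cache folios from callers that only care about file mappings. A sketch under that assumption:

    static inline struct address_space *folio_flush_mapping(struct folio *folio)
    {
            /* Cache flushing only cares about file-backed folios, not the swap cache. */
            if (unlikely(folio_test_swapcache(folio)))
                    return NULL;

            return folio_mapping(folio);
    }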
276 filemap_dirty_folio(folio_mapping(dfolio), dfolio); in nilfs_copy_dirty_pages()
255 } else if (folio_mapping(folio)) { in expected_folio_refs()
261 struct address_space *mapping = folio_mapping(folio);
828 filemap_dirty_folio(folio_mapping(folio), folio); in nfs_mark_request_dirty()
1801 filemap_dirty_folio(folio_mapping(folio), folio); in nfs_commit_resched_write()
72 mapping = folio_mapping(folio); in page_cache_pipe_buf_try_steal()