
Searched refs:cached (Results 1 – 25 of 239) sorted by relevance

/linux-6.15/tools/lib/api/fs/
cgroup.c
17 static struct cgroupfs_cache_entry *cached; variable
27 if (cached && !strcmp(cached->subsys, subsys)) { in cgroupfs_find_mountpoint()
28 if (strlen(cached->mountpoint) < maxlen) { in cgroupfs_find_mountpoint()
29 strcpy(buf, cached->mountpoint); in cgroupfs_find_mountpoint()
94 if (!cached) in cgroupfs_find_mountpoint()
95 cached = calloc(1, sizeof(*cached)); in cgroupfs_find_mountpoint()
97 if (cached) { in cgroupfs_find_mountpoint()
98 strncpy(cached->subsys, subsys, sizeof(cached->subsys) - 1); in cgroupfs_find_mountpoint()
99 strcpy(cached->mountpoint, mountpoint); in cgroupfs_find_mountpoint()
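The cgroup.c hits show a one-entry lookup cache: cgroupfs_find_mountpoint() reuses the last subsystem/mountpoint pair it resolved and only repopulates the entry on a miss. A minimal user-space sketch of that pattern follows (the slow_lookup() helper and buffer sizes are illustrative, not the kernel's):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct mount_cache_entry {
    char subsys[32];
    char mountpoint[256];
};

/* Single cached entry, populated lazily on the first successful lookup. */
static struct mount_cache_entry *cached;

/* Hypothetical slow path that would normally parse /proc/mounts. */
static int slow_lookup(const char *subsys, char *out, size_t maxlen)
{
    snprintf(out, maxlen, "/sys/fs/cgroup/%s", subsys);
    return 0;
}

static int find_mountpoint(const char *subsys, char *buf, size_t maxlen)
{
    char mountpoint[256];

    /* Fast path: serve the request from the cached entry if it matches. */
    if (cached && !strcmp(cached->subsys, subsys)) {
        if (strlen(cached->mountpoint) >= maxlen)
            return -1;
        strcpy(buf, cached->mountpoint);
        return 0;
    }

    if (slow_lookup(subsys, mountpoint, sizeof(mountpoint)))
        return -1;

    /* Allocate the cache entry once; keep working even if calloc fails. */
    if (!cached)
        cached = calloc(1, sizeof(*cached));
    if (cached) {
        strncpy(cached->subsys, subsys, sizeof(cached->subsys) - 1);
        strncpy(cached->mountpoint, mountpoint, sizeof(cached->mountpoint) - 1);
    }

    if (strlen(mountpoint) >= maxlen)
        return -1;
    strcpy(buf, mountpoint);
    return 0;
}

int main(void)
{
    char buf[256];

    find_mountpoint("memory", buf, sizeof(buf));       /* miss: fills the cache */
    if (!find_mountpoint("memory", buf, sizeof(buf)))  /* hit: served from cached */
        printf("%s\n", buf);
    return 0;
}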
/linux-6.15/fs/btrfs/
extent-io-tree.h
141 struct extent_state **cached);
143 struct extent_state **cached);
146 struct extent_state **cached) in lock_extent() argument
152 u64 end, struct extent_state **cached) in try_lock_extent() argument
172 u32 bits, struct extent_state **cached,
177 struct extent_state **cached) in clear_extent_bit() argument
183 struct extent_state **cached) in unlock_extent() argument
207 u64 end, struct extent_state **cached) in clear_extent_dirty() argument
211 EXTENT_DO_ACCOUNTING, cached); in clear_extent_dirty()
229 u64 end, struct extent_state **cached) in lock_dio_extent() argument
[all …]
extent-io-tree.c
664 cached = *cached_state; in __clear_extent_bit()
671 if (cached && extent_state_in_tree(cached) && in __clear_extent_bit()
672 cached->start <= start && cached->end > start) { in __clear_extent_bit()
675 state = cached; in __clear_extent_bit()
1659 cached = *cached_state; in count_range_bits()
1664 if (cached->start <= cur_start && cur_start <= cached->end) { in count_range_bits()
1665 state = cached; in count_range_bits()
1678 state = cached; in count_range_bits()
1767 if (cached && extent_state_in_tree(cached) && cached->start <= start && in test_range_bit()
1768 cached->end > start) in test_range_bit()
[all …]
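The extent-io-tree.c hits show how a caller-supplied cached extent_state is validated before it is trusted: it must still be linked into the tree and its range must cover the start offset, otherwise the code falls back to a full tree search. A simplified sketch of that check (the struct fields here are stand-ins for btrfs's real ones):

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-in for struct extent_state. */
struct extent_state {
    unsigned long long start;
    unsigned long long end;   /* inclusive */
    bool in_tree;             /* still linked into the io tree? */
};

/*
 * Use the cached state only when it is still in the tree and its range
 * covers 'start'; otherwise the caller performs a full tree search.
 */
static struct extent_state *pick_state(struct extent_state *cached,
                                       unsigned long long start)
{
    if (cached && cached->in_tree &&
        cached->start <= start && cached->end > start)
        return cached;        /* cache hit: skip the tree walk */
    return NULL;              /* miss: search the tree instead */
}

int main(void)
{
    struct extent_state st = { .start = 4096, .end = 8191, .in_tree = true };

    printf("hit: %d\n", pick_state(&st, 4096) != NULL);
    printf("miss: %d\n", pick_state(&st, 8192) != NULL);
    return 0;
}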
fiemap.c
72 bool cached; member
117 if (!cache->cached) in emit_fiemap_extent()
252 cache->cached = false; in emit_fiemap_extent()
266 cache->cached = false; in emit_fiemap_extent()
270 cache->cached = true; in emit_fiemap_extent()
295 if (!cache->cached) in emit_last_fiemap_cache()
300 cache->cached = false; in emit_last_fiemap_cache()
822 if (cache.cached && cache.offset + cache.len >= last_extent_end) { in extent_fiemap()
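fiemap.c keeps one pending extent in a small cache with a cached flag so that contiguous extents can be merged before being emitted to user space; the flag is cleared whenever the pending entry is flushed. A compact sketch of that buffering scheme (function and field names are illustrative):

#include <stdbool.h>
#include <stdio.h>

/* One pending fiemap entry, merged with neighbours before being emitted. */
struct fiemap_cache {
    unsigned long long offset;
    unsigned long long len;
    unsigned int flags;
    bool cached;              /* is there a pending entry to flush? */
};

static void emit(const struct fiemap_cache *c)
{
    printf("extent: off=%llu len=%llu flags=%#x\n", c->offset, c->len, c->flags);
}

static void cache_extent(struct fiemap_cache *c, unsigned long long offset,
                         unsigned long long len, unsigned int flags)
{
    /* Nothing pending yet: just remember this extent. */
    if (!c->cached) {
        c->offset = offset;
        c->len = len;
        c->flags = flags;
        c->cached = true;
        return;
    }

    /* Contiguous with identical flags: grow the pending extent instead. */
    if (offset == c->offset + c->len && flags == c->flags) {
        c->len += len;
        return;
    }

    /* Not mergeable: flush the pending entry and start a new one. */
    emit(c);
    c->offset = offset;
    c->len = len;
    c->flags = flags;
}

static void flush_last(struct fiemap_cache *c)
{
    if (!c->cached)
        return;
    emit(c);
    c->cached = false;
}

int main(void)
{
    struct fiemap_cache c = { 0 };

    cache_extent(&c, 0, 4096, 0);
    cache_extent(&c, 4096, 4096, 0);  /* merged with the previous extent */
    cache_extent(&c, 16384, 4096, 0); /* flushes the merged 0..8191 extent */
    flush_last(&c);
    return 0;
}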
/linux-6.15/fs/lockd/
mon.c
343 if (cached != NULL) { in nsm_get_handle()
344 refcount_inc(&cached->sm_count); in nsm_get_handle()
348 "cnt %d\n", cached->sm_name, in nsm_get_handle()
349 cached->sm_addrbuf, in nsm_get_handle()
351 return cached; in nsm_get_handle()
382 struct nsm_handle *cached; in nsm_reboot_lookup() local
388 if (unlikely(cached == NULL)) { in nsm_reboot_lookup()
392 return cached; in nsm_reboot_lookup()
395 refcount_inc(&cached->sm_count); in nsm_reboot_lookup()
399 cached->sm_name, cached->sm_addrbuf, in nsm_reboot_lookup()
[all …]
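The lockd hits show the refcounted variant of a lookup cache: when nsm_get_handle() or nsm_reboot_lookup() finds an existing handle, it bumps the reference count and returns it instead of allocating a new one. A minimal user-space sketch of that idea, using a plain counter in place of refcount_t and a single known handle in place of the nsm list:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct nsm_handle {
    char name[32];
    int refcount;
};

/* Toy "table": one known handle stands in for lockd's nsm list. */
static struct nsm_handle *known;

static struct nsm_handle *nsm_lookup(const char *name)
{
    struct nsm_handle *cached = NULL;

    if (known && !strcmp(known->name, name))
        cached = known;

    /* Cache hit: hand out another reference to the existing handle. */
    if (cached != NULL) {
        cached->refcount++;
        return cached;
    }

    /* Miss: create a new handle holding the initial reference. */
    cached = calloc(1, sizeof(*cached));
    if (!cached)
        return NULL;
    strncpy(cached->name, name, sizeof(cached->name) - 1);
    cached->refcount = 1;
    known = cached;
    return cached;
}

int main(void)
{
    struct nsm_handle *a = nsm_lookup("clienthost");
    struct nsm_handle *b = nsm_lookup("clienthost");

    printf("same handle: %d, refcount: %d\n", a == b, a->refcount);
    return 0;
}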
/linux-6.15/lib/zstd/compress/
hist.c
102 { U32 cached = MEM_read32(ip); ip += 4; in HIST_count_parallel_wksp() local
104 U32 c = cached; cached = MEM_read32(ip); ip += 4; in HIST_count_parallel_wksp()
109 c = cached; cached = MEM_read32(ip); ip += 4; in HIST_count_parallel_wksp()
114 c = cached; cached = MEM_read32(ip); ip += 4; in HIST_count_parallel_wksp()
119 c = cached; cached = MEM_read32(ip); ip += 4; in HIST_count_parallel_wksp()
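HIST_count_parallel_wksp() reads the next four input bytes into cached while the bytes from the previous read are still being counted, which hides load latency. A condensed sketch of that software pipelining with a single histogram (zstd actually spreads the counts over several tables):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Unaligned 32-bit load, analogous to zstd's MEM_read32(). */
static uint32_t read32(const uint8_t *p)
{
    uint32_t v;
    memcpy(&v, p, sizeof(v));
    return v;
}

static void count_bytes(const uint8_t *ip, size_t len, unsigned count[256])
{
    const uint8_t *const end = ip + (len & ~(size_t)7);

    if (len >= 8) {
        /* Prime the pipeline with the first 4 bytes. */
        uint32_t cached = read32(ip); ip += 4;
        while (ip < end) {
            /* Count the previous word while fetching the next one. */
            uint32_t c = cached; cached = read32(ip); ip += 4;
            count[(uint8_t)c]++; count[(uint8_t)(c >> 8)]++;
            count[(uint8_t)(c >> 16)]++; count[(uint8_t)(c >> 24)]++;
        }
        /* The last fetched word has not been counted yet. */
        count[(uint8_t)cached]++; count[(uint8_t)(cached >> 8)]++;
        count[(uint8_t)(cached >> 16)]++; count[(uint8_t)(cached >> 24)]++;
        len &= 7;
    }
    while (len--)
        count[*ip++]++;       /* leftover tail bytes, one at a time */
}

int main(void)
{
    unsigned count[256] = { 0 };
    const uint8_t data[] = "aaabbbcccddd";

    count_bytes(data, sizeof(data) - 1, count);
    printf("'a' occurs %u times\n", count['a']);
    return 0;
}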
/linux-6.15/drivers/mtd/maps/
pxa2xx-flash.c
26 unsigned long start = (unsigned long)map->cached + from; in pxa2xx_map_inval_cache()
70 info->map.cached = ioremap_cache(info->map.phys, info->map.size); in pxa2xx_flash_probe()
71 if (!info->map.cached) in pxa2xx_flash_probe()
87 if (info->map.cached) in pxa2xx_flash_probe()
88 iounmap(info->map.cached); in pxa2xx_flash_probe()
109 if (info->map.cached) in pxa2xx_flash_remove()
110 iounmap(info->map.cached); in pxa2xx_flash_remove()
/linux-6.15/tools/perf/util/
smt.c
9 static bool cached; in smt_on() local
13 if (cached) in smt_on()
21 cached = true; in smt_on()
/linux-6.15/tools/perf/arch/x86/util/
topdown.c
15 static bool cached; in topdown_sys_has_perf_metrics() local
18 if (cached) in topdown_sys_has_perf_metrics()
31 cached = true; in topdown_sys_has_perf_metrics()
tsc.c
63 static bool cached; in arch_get_tsc_freq() local
67 if (cached) in arch_get_tsc_freq()
70 cached = true; in arch_get_tsc_freq()
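smt.c, topdown.c and tsc.c all use the same memoization idiom: a function-local static bool cached guards a one-time computation whose result lives in a second static. A compact sketch of the idiom (the probe function is hypothetical, and the pattern is not thread-safe as written):

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for an expensive probe, e.g. parsing a sysfs file. */
static int probe_smt_active(void)
{
    puts("probing...");       /* visibly runs only once */
    return 1;
}

static bool smt_on(void)
{
    static bool cached;
    static bool value;

    /* Fast path: the answer was already computed on a previous call. */
    if (cached)
        return value;

    value = probe_smt_active() > 0;
    cached = true;
    return value;
}

int main(void)
{
    printf("%d\n", smt_on());   /* probes */
    printf("%d\n", smt_on());   /* served from the cached value */
    return 0;
}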
/linux-6.15/fs/proc/
meminfo.c
38 long cached; in meminfo_proc_show() local
48 cached = global_node_page_state(NR_FILE_PAGES) - in meminfo_proc_show()
50 if (cached < 0) in meminfo_proc_show()
51 cached = 0; in meminfo_proc_show()
64 show_val_kb(m, "Cached: ", cached); in meminfo_proc_show()
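meminfo_proc_show() derives the Cached: line from NR_FILE_PAGES minus the swap cache and buffer pages and clamps the result at zero, because the counters are sampled independently and the difference can be transiently negative. A small numeric sketch of that arithmetic (the counter values and the 4 KiB page size are made up for illustration):

#include <stdio.h>

/* Made-up page counters, sampled independently like the kernel's vmstat. */
static long nr_file_pages = 250000;
static long swap_cached   = 1200;
static long buffer_pages  = 8000;

int main(void)
{
    long cached = nr_file_pages - swap_cached - buffer_pages;

    /* The counters are not read atomically, so the difference can go negative. */
    if (cached < 0)
        cached = 0;

    /* meminfo prints kB; with 4 KiB pages that is pages * 4. */
    printf("Cached: %8lu kB\n", (unsigned long)cached * 4);
    return 0;
}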
/linux-6.15/Documentation/ABI/testing/
sysfs-fs-erofs
24 currently used to drop in-memory pclusters and cached
27 - 1 : invalidate cached compressed folios
29 - 3 : drop in-memory pclusters and cached compressed folios
sysfs-class-iommu-intel-iommu
15 The cached hardware capability register value
23 The cached hardware extended capability register
/linux-6.15/drivers/gpu/drm/msm/dsi/phy/
dsi_phy_10nm.c
475 struct pll_10nm_cached_state *cached = &pll_10nm->cached_state; in dsi_10nm_pll_save_state() local
479 cached->pll_out_div = readl(pll_10nm->phy->pll_base + in dsi_10nm_pll_save_state()
481 cached->pll_out_div &= 0x3; in dsi_10nm_pll_save_state()
484 cached->bit_clk_div = cmn_clk_cfg0 & 0xf; in dsi_10nm_pll_save_state()
485 cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4; in dsi_10nm_pll_save_state()
488 cached->pll_mux = cmn_clk_cfg1 & 0x3; in dsi_10nm_pll_save_state()
491 pll_10nm->phy->id, cached->pll_out_div, cached->bit_clk_div, in dsi_10nm_pll_save_state()
492 cached->pix_clk_div, cached->pll_mux); in dsi_10nm_pll_save_state()
505 val |= cached->pll_out_div; in dsi_10nm_pll_restore_state()
508 writel(cached->bit_clk_div | (cached->pix_clk_div << 4), in dsi_10nm_pll_restore_state()
[all …]
dsi_phy_7nm.c
568 struct pll_7nm_cached_state *cached = &pll_7nm->cached_state; in dsi_7nm_pll_save_state() local
572 cached->pll_out_div = readl(pll_7nm->phy->pll_base + in dsi_7nm_pll_save_state()
574 cached->pll_out_div &= 0x3; in dsi_7nm_pll_save_state()
581 cached->pll_mux = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK, cmn_clk_cfg1); in dsi_7nm_pll_save_state()
584 pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div, in dsi_7nm_pll_save_state()
585 cached->pix_clk_div, cached->pll_mux); in dsi_7nm_pll_save_state()
591 struct pll_7nm_cached_state *cached = &pll_7nm->cached_state; in dsi_7nm_pll_restore_state() local
597 val |= cached->pll_out_div; in dsi_7nm_pll_restore_state()
601 DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0(cached->bit_clk_div) | in dsi_7nm_pll_restore_state()
602 DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4(cached->pix_clk_div)); in dsi_7nm_pll_restore_state()
[all …]
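Both DSI PHY drivers snapshot a few divider and mux bitfields from live clock-configuration registers into a *_cached_state struct when the PLL state is saved, and write them back with read-modify-write accesses on restore. A simplified sketch of that save/restore flow over fake registers (the masks and field positions are illustrative, not the real PHY layout):

#include <stdint.h>
#include <stdio.h>

/* Fake memory-mapped registers standing in for the PHY's CLK_CFG0/1. */
static uint32_t clk_cfg0 = 0x000000a3;  /* bit_clk_div in [3:0], pix_clk_div in [7:4] */
static uint32_t clk_cfg1 = 0x00000002;  /* pll_mux in [1:0] */

struct pll_cached_state {
    uint32_t bit_clk_div;
    uint32_t pix_clk_div;
    uint32_t pll_mux;
};

static void pll_save_state(struct pll_cached_state *cached)
{
    cached->bit_clk_div = clk_cfg0 & 0xf;
    cached->pix_clk_div = (clk_cfg0 & 0xf0) >> 4;
    cached->pll_mux     = clk_cfg1 & 0x3;
}

static void pll_restore_state(const struct pll_cached_state *cached)
{
    /* Read-modify-write: only the cached bitfields are rewritten. */
    clk_cfg0 = (clk_cfg0 & ~0xffu) |
               cached->bit_clk_div | (cached->pix_clk_div << 4);
    clk_cfg1 = (clk_cfg1 & ~0x3u) | cached->pll_mux;
}

int main(void)
{
    struct pll_cached_state cached;

    pll_save_state(&cached);
    clk_cfg0 = 0;               /* pretend the block lost state over suspend */
    clk_cfg1 = 0;
    pll_restore_state(&cached);
    printf("cfg0=%#x cfg1=%#x\n", (unsigned)clk_cfg0, (unsigned)clk_cfg1);
    return 0;
}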
/linux-6.15/drivers/mtd/devices/
phram.c
38 bool cached; member
89 if (phram->cached) in phram_map()
105 if (phram->cached) { in phram_unmap()
129 bool cached = np ? !of_property_read_bool(np, "no-map") : false; in register_device() local
137 new->cached = cached; in register_device()
/linux-6.15/Documentation/filesystems/
fuse-io.rst
10 - cached
21 In cached mode reads may be satisfied from the page cache, and data may be
25 The cached mode has two sub modes controlling how writes are handled. The
31 WRITE requests, as well as updating any cached pages (and caching previously
/linux-6.15/tools/testing/selftests/drivers/net/lib/py/
env.py
250 cached = self._required_cmd.get(comm, {})
251 if cached.get(key) is None:
252 cached[key] = cmd("command -v -- " + comm, fail=False,
254 self._required_cmd[comm] = cached
255 return cached[key]
/linux-6.15/Documentation/admin-guide/device-mapper/
writecache.rst
6 doesn't cache reads because reads are supposed to be cached in page cache
17 2. the underlying device that will be cached
58 new writes (however, writes to already cached blocks are
60 writes) and it will gradually writeback any cached
62 process with "dmsetup status". When the number of cached
/linux-6.15/arch/arm/mach-omap2/
sram.c
184 int cached = 1; in omap2_map_sram() local
194 cached = 0; in omap2_map_sram()
201 omap_sram_base = __arm_ioremap_exec(omap_sram_start, omap_sram_size, cached); in omap2_map_sram()
/linux-6.15/sound/soc/mediatek/mt8188/
mt8188-dai-dmic.c
547 unsigned int *cached; in mtk_dai_dmic_hw_gain_ctrl_put() local
553 cached = &dmic_priv->gain_on[0]; in mtk_dai_dmic_hw_gain_ctrl_put()
555 cached = &dmic_priv->gain_on[1]; in mtk_dai_dmic_hw_gain_ctrl_put()
557 cached = &dmic_priv->gain_on[2]; in mtk_dai_dmic_hw_gain_ctrl_put()
559 cached = &dmic_priv->gain_on[3]; in mtk_dai_dmic_hw_gain_ctrl_put()
563 if (source == *cached) in mtk_dai_dmic_hw_gain_ctrl_put()
566 *cached = source; in mtk_dai_dmic_hw_gain_ctrl_put()
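mtk_dai_dmic_hw_gain_ctrl_put() points cached at the stored gain-on state for the selected channel and returns early when the new value matches, so the hardware is only programmed on an actual change. A small sketch of that compare-before-write pattern (the register write is a stub):

#include <stdio.h>

#define NUM_GAINS 4

static unsigned int gain_on[NUM_GAINS];   /* last value applied per gain block */

static void write_gain_hw(int id, unsigned int on)
{
    printf("hw write: gain%d <- %u\n", id, on);   /* runs only on changes */
}

/* Returns 1 if the control changed, 0 if the cached value already matched. */
static int gain_ctrl_put(int id, unsigned int source)
{
    unsigned int *cached = &gain_on[id];

    if (source == *cached)
        return 0;             /* no change: skip the register update */

    *cached = source;
    write_gain_hw(id, source);
    return 1;
}

int main(void)
{
    printf("%d\n", gain_ctrl_put(2, 1));  /* change: writes hardware */
    printf("%d\n", gain_ctrl_put(2, 1));  /* repeat: cached, no write */
    return 0;
}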
/linux-6.15/drivers/gpu/drm/ttm/
ttm_agp_backend.c
55 int ret, cached = ttm->caching == ttm_cached; in ttm_agp_bind() local
77 mem->type = (cached) ? AGP_USER_CACHED_MEMORY : AGP_USER_MEMORY; in ttm_agp_bind()
/linux-6.15/fs/bcachefs/
alloc_foreground.h
201 bool cached) in bch2_alloc_sectors_append_ptrs_inlined() argument
214 ptr.cached = cached || in bch2_alloc_sectors_append_ptrs_inlined()
/linux-6.15/arch/s390/appldata/
appldata_mem.c
54 u64 cached; /* size of (used) cache, w/o buffers */ member
102 mem_data->cached = P2K(global_node_page_state(NR_FILE_PAGES) in appldata_get_mem_data()
/linux-6.15/drivers/block/
ps3vram.c
428 unsigned int cached, count; in ps3vram_read() local
449 cached = CACHE_OFFSET + entry * priv->cache.page_size + offset; in ps3vram_read()
453 (unsigned int)from, cached, offset, avail, count); in ps3vram_read()
457 memcpy(buf, priv->xdr_buf + cached, avail); in ps3vram_read()
472 unsigned int cached, count; in ps3vram_write() local
490 cached = CACHE_OFFSET + entry * priv->cache.page_size + offset; in ps3vram_write()
494 cached, offset, avail, count); in ps3vram_write()
498 memcpy(priv->xdr_buf + cached, buf, avail); in ps3vram_write()
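ps3vram_read() and ps3vram_write() address data inside the driver's XDR bounce buffer as cached = CACHE_OFFSET + entry * page_size + offset, i.e. the start of the hit cache page plus the offset within it, and copy at most the bytes remaining in that page per iteration. A small arithmetic sketch of that addressing (the constants are made up):

#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define CACHE_OFFSET 4096u        /* illustrative start of the cache region */
#define PAGE_SIZE    (256u * 1024u)

static unsigned char xdr_buf[CACHE_OFFSET + 4 * PAGE_SIZE];

/* Copy 'count' bytes out of cache page 'entry', starting 'offset' into it. */
static size_t cache_read(unsigned int entry, unsigned int offset,
                         unsigned char *buf, size_t count)
{
    unsigned int cached = CACHE_OFFSET + entry * PAGE_SIZE + offset;
    size_t avail = PAGE_SIZE - offset;      /* bytes left in this cache page */

    if (avail > count)
        avail = count;
    memcpy(buf, xdr_buf + cached, avail);
    return avail;                           /* caller loops for the remainder */
}

int main(void)
{
    unsigned char buf[16];
    size_t n;

    memset(xdr_buf + CACHE_OFFSET + 2 * PAGE_SIZE, 0xab, PAGE_SIZE);
    n = cache_read(2, PAGE_SIZE - 8, buf, sizeof(buf));
    printf("copied %zu bytes, first byte %#x\n", n, (unsigned)buf[0]);
    return 0;
}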
