Searched refs:cache_map (Results 1 – 5 of 5) sorted by relevance
/linux-6.15/arch/x86/kernel/cpu/mtrr/generic.c
      38  struct cache_map {                                                    (struct)
      82  static struct cache_map *cache_map __refdata = init_cache_map;
     170  memmove(cache_map + idx, cache_map + idx + 1,                         in rm_map_entry_at()
     193  struct cache_map *prev = cache_map + idx - 1;                         in add_map_entry_at()
     200  struct cache_map *next = cache_map + idx;                             in add_map_entry_at()
     207  cache_map[idx - 1].end = cache_map[idx].end;                          in add_map_entry_at()
     228  memmove(cache_map + idx + 1, cache_map + idx,                         in add_map_entry_at()
     247  if (start == cache_map[idx].start && end == cache_map[idx].end) {    in clr_map_range_at()
     396  cache_map[i].start, cache_map[i].end - 1,                             in mtrr_build_map()
     414  cache_map = kcalloc(new_size, sizeof(*cache_map), GFP_KERNEL);        in mtrr_copy_map()
    [all …]
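The mtrr/generic.c hits above show a sorted array of address ranges being edited in place: memmove() closes a hole in rm_map_entry_at() and opens one in add_map_entry_at(), and mtrr_copy_map() reallocates the array with kcalloc(). Below is a minimal userspace sketch of that shift-and-insert pattern; the range_map struct, MAP_MAX limit and helper names are hypothetical stand-ins, and the kernel's struct cache_map carries additional fields not modeled here.

```c
#include <stdio.h>
#include <string.h>

struct range_map {
    unsigned long long start;   /* first byte of the range */
    unsigned long long end;     /* one past the last byte */
    unsigned char type;         /* caching type for the range */
};

#define MAP_MAX 16
static struct range_map map[MAP_MAX];
static int map_used;

/* Remove the entry at idx by shifting the tail of the array down. */
static void rm_entry_at(int idx)
{
    memmove(map + idx, map + idx + 1,
            sizeof(*map) * (map_used - idx - 1));
    map_used--;
}

/* Insert an entry at idx by shifting the tail of the array up. */
static int add_entry_at(int idx, unsigned long long start,
                        unsigned long long end, unsigned char type)
{
    if (map_used == MAP_MAX)
        return -1;
    memmove(map + idx + 1, map + idx,
            sizeof(*map) * (map_used - idx));
    map[idx] = (struct range_map){ .start = start, .end = end, .type = type };
    map_used++;
    return 0;
}

int main(void)
{
    add_entry_at(0, 0x00000, 0x80000, 0);
    add_entry_at(1, 0x80000, 0xa0000, 1);
    rm_entry_at(0);
    for (int i = 0; i < map_used; i++)
        printf("%llx-%llx type %u\n", map[i].start, map[i].end - 1, map[i].type);
    return 0;
}
```

The same shift-and-insert idiom keeps the array sorted by start address, so callers can walk the map in address order.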
/linux-6.15/drivers/perf/arm_pmu.c
     121  armpmu_map_cache_event(const unsigned (*cache_map)                    in armpmu_map_cache_event()
     141  if (!cache_map)                                                       in armpmu_map_cache_event()
     144  ret = (int)(*cache_map)[cache_type][cache_op][cache_result];          in armpmu_map_cache_event()
     176  const unsigned (*cache_map)                                           in armpmu_map_event()
     192  return armpmu_map_cache_event(cache_map, config);                     in armpmu_map_event()
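armpmu_map_cache_event() above reads its event code out of a three-dimensional table indexed by cache type, operation and result, bailing out when the table pointer is NULL or the entry is marked unsupported. The standalone sketch below mirrors that lookup shape; the enum values, table contents and the sentinel value are illustrative, not real ARM encodings (the kernel indexes by the PERF_COUNT_HW_CACHE_* constants).

```c
#include <stdio.h>

/* Table dimensions; illustrative stand-ins for PERF_COUNT_HW_CACHE_*_MAX. */
enum { CACHE_L1D, CACHE_L1I, CACHE_LL, CACHE_TYPE_MAX };
enum { OP_READ, OP_WRITE, OP_PREFETCH, CACHE_OP_MAX };
enum { RESULT_ACCESS, RESULT_MISS, CACHE_RESULT_MAX };

#define CACHE_OP_UNSUPPORTED 0xffff   /* sentinel: "no hardware event" */

/* Event codes are made up; real tables fill every slot explicitly. */
static const unsigned cache_map[CACHE_TYPE_MAX][CACHE_OP_MAX][CACHE_RESULT_MAX] = {
    [CACHE_L1D][OP_READ][RESULT_ACCESS]   = 0x04,
    [CACHE_L1D][OP_READ][RESULT_MISS]     = 0x03,
    [CACHE_L1D][OP_PREFETCH][RESULT_MISS] = CACHE_OP_UNSUPPORTED,
};

static int map_cache_event(unsigned type, unsigned op, unsigned result)
{
    unsigned ev;

    /* Reject out-of-range indices before touching the table. */
    if (type >= CACHE_TYPE_MAX || op >= CACHE_OP_MAX || result >= CACHE_RESULT_MAX)
        return -1;

    ev = cache_map[type][op][result];
    if (ev == CACHE_OP_UNSUPPORTED)
        return -1;      /* combination not countable on this PMU */

    return (int)ev;
}

int main(void)
{
    printf("L1D read miss     -> %d\n", map_cache_event(CACHE_L1D, OP_READ, RESULT_MISS));
    printf("L1D prefetch miss -> %d\n", map_cache_event(CACHE_L1D, OP_PREFETCH, RESULT_MISS));
    return 0;
}
```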
/linux-6.15/arch/sparc/kernel/perf_event.c
     159  const cache_map_t *cache_map;                                         (member)
     323  .cache_map = &ultra3_cache_map,
     461  .cache_map = &niagara1_cache_map,
     596  .cache_map = &niagara2_cache_map,
     753  .cache_map = &niagara4_cache_map,
     783  .cache_map = &niagara4_cache_map,
    1201  if (!sparc_pmu->cache_map)                                            in sparc_map_cache_event()
    1216  pmap = &((*sparc_pmu->cache_map)[cache_type][cache_op][cache_result]);    in sparc_map_cache_event()
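The sparc hits show the other half of the pattern: each supported chip (ultra3, niagara1/2/4) ships its own cache_map table, a pointer to which is installed in the PMU descriptor at init time, and sparc_map_cache_event() refuses cache events when no table is present. A compressed sketch of that descriptor-plus-table arrangement, with hypothetical names and event codes:

```c
#include <stdio.h>
#include <stddef.h>

#define TYPE_MAX 3
#define OP_MAX 3
#define RESULT_MAX 2
#define CACHE_OP_UNSUPPORTED 0xffff

typedef unsigned cache_map_t[TYPE_MAX][OP_MAX][RESULT_MAX];

struct pmu_desc {
    const char *name;
    const cache_map_t *cache_map;   /* NULL if the chip has no cache events */
};

/* Per-chip table; the event encodings here are made up. */
static const cache_map_t chip_a_cache_map = {
    [0][0][0] = 0x09,
    [0][0][1] = 0x0a,
};

static const struct pmu_desc chip_a_pmu = {
    .name      = "chip-a",
    .cache_map = &chip_a_cache_map,
};

/* In the kernel the descriptor is selected at boot from the CPU type. */
static const struct pmu_desc *pmu = &chip_a_pmu;

static int map_cache_event(unsigned type, unsigned op, unsigned result)
{
    unsigned ev;

    if (!pmu->cache_map)
        return -1;                  /* no table installed for this chip */
    ev = (*pmu->cache_map)[type][op][result];
    return ev == CACHE_OP_UNSUPPORTED ? -1 : (int)ev;
}

int main(void)
{
    printf("type 0, read access -> %d\n", map_cache_event(0, 0, 0));
    return 0;
}
```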
/linux-6.15/include/linux/perf/arm_pmu.h
     135  const unsigned (*cache_map)[PERF_COUNT_HW_CACHE_MAX]
/linux-6.15/drivers/md/dm-cache-target.c
    2650  static int cache_map(struct dm_target *ti, struct bio *bio)           in cache_map()  (function)
    3523  .map = cache_map,
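In dm-cache-target.c, cache_map() is not a lookup table at all but the bio mapping hook of the "cache" device-mapper target, wired up through the target_type .map field. The sketch below shows the same registration shape with a trivial passthrough target modeled on dm-linear rather than on dm-cache's own logic; the passthrough_* names are hypothetical, and the real cache_map() implements the cache lookup and migration logic.

```c
#include <linux/module.h>
#include <linux/device-mapper.h>
#include <linux/slab.h>
#include <linux/bio.h>

/* Per-target state: the backing device and the sector offset into it. */
struct passthrough_target {
    struct dm_dev *dev;
    sector_t start;
};

/* ctr: parse "<dev_path> <start_sector>" from the table line. */
static int passthrough_ctr(struct dm_target *ti, unsigned int argc, char **argv)
{
    struct passthrough_target *pt;
    unsigned long long start;
    int ret;

    if (argc != 2) {
        ti->error = "Expected <dev_path> <start_sector>";
        return -EINVAL;
    }

    pt = kzalloc(sizeof(*pt), GFP_KERNEL);
    if (!pt)
        return -ENOMEM;

    if (kstrtoull(argv[1], 10, &start)) {
        ti->error = "Invalid start sector";
        kfree(pt);
        return -EINVAL;
    }
    pt->start = start;

    ret = dm_get_device(ti, argv[0], dm_table_get_mode(ti->table), &pt->dev);
    if (ret) {
        ti->error = "Device lookup failed";
        kfree(pt);
        return ret;
    }

    ti->private = pt;
    return 0;
}

static void passthrough_dtr(struct dm_target *ti)
{
    struct passthrough_target *pt = ti->private;

    dm_put_device(ti, pt->dev);
    kfree(pt);
}

/* map: redirect each bio to the backing device at the remapped sector. */
static int passthrough_map(struct dm_target *ti, struct bio *bio)
{
    struct passthrough_target *pt = ti->private;

    bio_set_dev(bio, pt->dev->bdev);
    if (bio_sectors(bio))
        bio->bi_iter.bi_sector =
            pt->start + dm_target_offset(ti, bio->bi_iter.bi_sector);

    return DM_MAPIO_REMAPPED;   /* hand the remapped bio back to the DM core */
}

static struct target_type passthrough_target_type = {
    .name    = "passthrough_sketch",
    .version = {1, 0, 0},
    .module  = THIS_MODULE,
    .ctr     = passthrough_ctr,
    .dtr     = passthrough_dtr,
    .map     = passthrough_map,
};

static int __init passthrough_init(void)
{
    return dm_register_target(&passthrough_target_type);
}

static void __exit passthrough_exit(void)
{
    dm_unregister_target(&passthrough_target_type);
}

module_init(passthrough_init);
module_exit(passthrough_exit);
MODULE_LICENSE("GPL");
```

Returning DM_MAPIO_REMAPPED lets the DM core resubmit the bio; a target can instead queue the bio itself and return DM_MAPIO_SUBMITTED, which is typically what heavier targets do for bios they defer.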