/linux-6.15/fs/xfs/
xfs_stats.h
  170  per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v++; \
  171  per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v++; \
  176  per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v--; \
  177  per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v--; \
  182  per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v += (inc); \
  183  per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v += (inc); \
  188  per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]++; \
  189  per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]++; \
  194  per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]; \
  195  per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]; \
  [all …]
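
These header hits are the update side of the XFS statistics: each macro bumps both the global copy and the per-mount copy of a counter on the executing CPU. A minimal sketch of that double-bump idiom, using hypothetical demo_* names rather than the real XFS types (XFS's current_cpu() wrapper is assumed here to reduce to raw_smp_processor_id()):

#include <linux/percpu.h>
#include <linux/smp.h>

struct demo_stats {
	unsigned int	v;
};

static struct demo_stats __percpu *global_stats;

struct demo_mount {
	struct demo_stats __percpu *m_stats;
};

/*
 * Bump both the global and the per-mount counter on the executing CPU.
 * The raw CPU id is tolerable: if preemption migrates us mid-macro, a
 * bump lands on a neighbour's counter, which skews a statistic but
 * corrupts nothing, so no lock is taken.
 */
#define DEMO_STATS_INC(mp)						\
do {									\
	per_cpu_ptr(global_stats, raw_smp_processor_id())->v++;	\
	per_cpu_ptr((mp)->m_stats, raw_smp_processor_id())->v++;	\
} while (0)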
|
xfs_stats.c
  15   val += *(((__u32 *)per_cpu_ptr(stats, cpu) + idx));           in counter_val()
  76   xs_xstrat_bytes += per_cpu_ptr(stats, i)->s.xs_xstrat_bytes;  in xfs_stats_format()
  77   xs_write_bytes += per_cpu_ptr(stats, i)->s.xs_write_bytes;    in xfs_stats_format()
  78   xs_read_bytes += per_cpu_ptr(stats, i)->s.xs_read_bytes;      in xfs_stats_format()
  79   defer_relog += per_cpu_ptr(stats, i)->s.defer_relog;          in xfs_stats_format()
  105  vn_active = per_cpu_ptr(stats, c)->s.vn_active;               in xfs_stats_clearall()
  106  memset(per_cpu_ptr(stats, c), 0, sizeof(*stats));             in xfs_stats_clearall()
  107  per_cpu_ptr(stats, c)->s.vn_active = vn_active;               in xfs_stats_clearall()
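
The .c hits show the matching read side: xfs_stats_format() folds every CPU's copy into one total, and xfs_stats_clearall() zeroes each copy while preserving vn_active, which counts live objects and must survive a reset. A sketch of both patterns over a hypothetical demo_stats layout (assume the percpu area came from alloc_percpu()):

#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/string.h>
#include <linux/types.h>

struct demo_stats {
	__u64	xs_read_bytes;
	__u32	vn_active;	/* counts live objects; must survive a reset */
};

static struct demo_stats __percpu *demo;	/* from alloc_percpu() */

static __u64 demo_sum_read_bytes(void)
{
	__u64 sum = 0;
	int cpu;

	/* Fold every CPU's private copy into one total. */
	for_each_possible_cpu(cpu)
		sum += per_cpu_ptr(demo, cpu)->xs_read_bytes;
	return sum;
}

static void demo_clear_all(void)
{
	__u32 vn_active;
	int cpu;

	for_each_possible_cpu(cpu) {
		/* Save the live-object count, wipe the copy, restore it. */
		vn_active = per_cpu_ptr(demo, cpu)->vn_active;
		memset(per_cpu_ptr(demo, cpu), 0, sizeof(struct demo_stats));
		per_cpu_ptr(demo, cpu)->vn_active = vn_active;
	}
}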
|
/linux-6.15/include/linux/
context_tracking_state.h
  66  struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu);  in ct_rcu_watching_cpu()
  73  struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu);  in ct_rcu_watching_cpu_acquire()
  85  struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu);  in ct_nesting_cpu()
  97  struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu);  in ct_nmi_nesting_cpu()
|
part_stat.h
  29  (per_cpu_ptr((part)->bd_stats, (cpu))->field)
  39  res += per_cpu_ptr((part)->bd_stats, _cpu)->field; \
  48  memset(per_cpu_ptr(part->bd_stats, i), value,  in part_stat_set_all()
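
part_stat.h layers accessor macros over per_cpu_ptr(): one names a single CPU's field, another sums a field over every possible CPU. A reduced sketch in the same macro style; the demo_* names are illustrative, not the real block-layer API:

#include <linux/cpumask.h>
#include <linux/percpu.h>

struct demo_disk_stats {
	unsigned long	sectors;
	unsigned long	ios;
};

struct demo_part {
	struct demo_disk_stats __percpu *stats;
};

/* Name one CPU's copy of a field. */
#define demo_stat_get_cpu(part, field, cpu)				\
	(per_cpu_ptr((part)->stats, (cpu))->field)

/* Sum a field over every possible CPU. */
#define demo_stat_read(part, field)					\
({									\
	typeof(demo_stat_get_cpu(part, field, 0)) res = 0;		\
	int _cpu;							\
	for_each_possible_cpu(_cpu)					\
		res += demo_stat_get_cpu(part, field, _cpu);		\
	res;								\
})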
|
/linux-6.15/drivers/infiniband/ulp/rtrs/
rtrs-clt-stats.c
  27   s = per_cpu_ptr(stats->pcpu_stats, con->cpu);  in rtrs_clt_update_wc_stats()
  47   s = per_cpu_ptr(stats->pcpu_stats, cpu);  in rtrs_clt_stats_migration_from_cnt_to_str()
  66   s = per_cpu_ptr(stats->pcpu_stats, cpu);  in rtrs_clt_stats_migration_to_cnt_to_str()
  90   r = &per_cpu_ptr(stats->pcpu_stats, cpu)->rdma;  in rtrs_clt_stats_rdma_to_str()
  119  s = per_cpu_ptr(stats->pcpu_stats, cpu);  in rtrs_clt_reset_rdma_stats()
  135  s = per_cpu_ptr(stats->pcpu_stats, cpu);  in rtrs_clt_reset_cpu_migr_stats()
|
/linux-6.15/kernel/
smpboot.c
  172  struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);  in __smpboot_create_thread()
  197  *per_cpu_ptr(ht->store, cpu) = tsk;  in __smpboot_create_thread()
  230  struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);  in smpboot_unpark_thread()
  249  struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);  in smpboot_park_thread()
  272  struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);  in smpboot_destroy_threads()
  276  *per_cpu_ptr(ht->store, cpu) = NULL;  in smpboot_destroy_threads()
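
smpboot keeps one kthread pointer per CPU in ht->store, a percpu slot whose element type is itself a pointer; hence the extra dereference on every access, since per_cpu_ptr() returns the address of the slot. A sketch of that double-pointer idiom with hypothetical demo_* names:

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/sched.h>

/* One kthread pointer per CPU: a percpu slot of pointer type. */
static struct task_struct * __percpu *demo_store;

static int __init demo_store_init(void)
{
	demo_store = alloc_percpu(struct task_struct *);
	return demo_store ? 0 : -ENOMEM;
}

static void demo_remember_thread(unsigned int cpu, struct task_struct *tsk)
{
	/* per_cpu_ptr() yields the address of cpu's slot; store through it. */
	*per_cpu_ptr(demo_store, cpu) = tsk;
}

static struct task_struct *demo_thread_of(unsigned int cpu)
{
	return *per_cpu_ptr(demo_store, cpu);
}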
|
relay.c
  204  *per_cpu_ptr(chan->buf, buf->cpu) = NULL;  in relay_destroy_buf()
  334  if ((buf = *per_cpu_ptr(chan->buf, i)))  in relay_reset()
  381  return *per_cpu_ptr(chan->buf, 0);  in relay_open_buf()
  405  *per_cpu_ptr(chan->buf, 0) = buf;  in relay_open_buf()
  439  if (*per_cpu_ptr(chan->buf, cpu))  in relay_prepare_cpu()
  447  *per_cpu_ptr(chan->buf, cpu) = buf;  in relay_prepare_cpu()
  519  *per_cpu_ptr(chan->buf, i) = buf;  in relay_open()
  528  if ((buf = *per_cpu_ptr(chan->buf, i)))  in relay_open()
  593  buf = *per_cpu_ptr(chan->buf, 0);  in relay_late_setup_files()
  612  buf = *per_cpu_ptr(chan->buf, i);  in relay_late_setup_files()
  [all …]
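
relay fills the same kind of pointer slot lazily from a CPU-hotplug prepare callback, skipping CPUs whose buffer already exists. A sketch of that hotplug-driven population, with demo_* names standing in for the relay structures:

#include <linux/cpuhotplug.h>
#include <linux/mm.h>
#include <linux/percpu.h>
#include <linux/slab.h>

static void * __percpu *demo_buf;	/* one buffer pointer per CPU */

/* CPUHP "prepare" callback: populate the incoming CPU's slot once. */
static int demo_prepare_cpu(unsigned int cpu)
{
	void *buf;

	if (*per_cpu_ptr(demo_buf, cpu))	/* survived an earlier unplug */
		return 0;

	buf = kzalloc(PAGE_SIZE, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;
	*per_cpu_ptr(demo_buf, cpu) = buf;
	return 0;
}

Registered with something like cpuhp_setup_state(CPUHP_BP_PREPARE_DYN, "demo:prepare", demo_prepare_cpu, NULL), the callback also runs once for each CPU already online at setup time.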
|
cpu.c
  173   struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in cpuhp_invoke_callback()
  311   atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu);  in cpuhp_wait_for_sync_state()
  362   atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu);  in cpuhp_bp_sync_dead()
  404   atomic_t *st = per_cpu_ptr(&cpuhp_state.ap_sync_state, cpu);  in cpuhp_can_boot_ap()
  795   struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in bringup_wait_for_ap_online()
  828   struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in cpuhp_bringup_ap()
  860   struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in bringup_cpu()
  1130  struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in cpuhp_invoke_ap_callback()
  1179  struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu);  in cpuhp_kick_ap_work()
  1210  st = per_cpu_ptr(&cpuhp_state, cpu);  in cpuhp_init_state()
  [all …]
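
Note the form of the first argument throughout cpu.c: cpuhp_state is a statically allocated DEFINE_PER_CPU variable, so callers pass its address (&cpuhp_state) rather than a pointer returned by alloc_percpu(). A minimal sketch of that static flavour, with hypothetical names:

#include <linux/percpu.h>

struct demo_state {
	int	stage;
};

/* Statically allocated per-CPU data: one instance per possible CPU. */
static DEFINE_PER_CPU(struct demo_state, demo_cpu_state);

static int demo_stage_of(unsigned int cpu)
{
	/* Pass the variable's address, not the variable itself. */
	struct demo_state *st = per_cpu_ptr(&demo_cpu_state, cpu);

	return st->stage;
}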
|
/linux-6.15/kernel/irq/
matrix.c
  72   struct cpumap *cm = per_cpu_ptr(m->maps, cpu);  in irq_alloc_matrix()
  144  cm = per_cpu_ptr(m->maps, cpu);  in matrix_find_best_cpu()
  165  cm = per_cpu_ptr(m->maps, cpu);  in matrix_find_best_cpu_managed()
  221  struct cpumap *cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_reserve_managed()
  262  struct cpumap *cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_remove_managed()
  305  cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_alloc_managed()
  400  cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_alloc()
  427  struct cpumap *cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_free()
  510  struct cpumap *cm = per_cpu_ptr(m->maps, cpu);  in irq_matrix_debug_show()
|
/linux-6.15/kernel/sched/
cpuacct.c
  97   u64 *cpuusage = per_cpu_ptr(ca->cpuusage, cpu);  in cpuacct_cpuusage_read()
  98   u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat;  in cpuacct_cpuusage_read()
  137  u64 *cpuusage = per_cpu_ptr(ca->cpuusage, cpu);  in cpuacct_cpuusage_write()
  138  u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat;  in cpuacct_cpuusage_write()
  270  u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat;  in cpuacct_stats_show()
  278  cputime.sum_exec_runtime += *per_cpu_ptr(ca->cpuusage, cpu);  in cpuacct_stats_show()
  342  *per_cpu_ptr(ca->cpuusage, cpu) += cputime;  in cpuacct_charge()
|
topology.c
  934   sibling = *per_cpu_ptr(sdd->sd, i);  in build_balance_mask()
  1213  sg = *per_cpu_ptr(sdd->sg, cpu);  in get_group()
  1537  *per_cpu_ptr(sdd->sd, cpu) = NULL;  in claim_allocations()
  1543  *per_cpu_ptr(sdd->sg, cpu) = NULL;  in claim_allocations()
  2252  *per_cpu_ptr(sdd->sd, j) = sd;  in __sdt_alloc()
  2259  *per_cpu_ptr(sdd->sds, j) = sds;  in __sdt_alloc()
  2268  *per_cpu_ptr(sdd->sg, j) = sg;  in __sdt_alloc()
  2277  *per_cpu_ptr(sdd->sgc, j) = sgc;  in __sdt_alloc()
  2296  sd = *per_cpu_ptr(sdd->sd, j);  in __sdt_free()
  2419  *per_cpu_ptr(d.sd, i) = sd;  in build_sched_domains()
  [all …]
|
/linux-6.15/kernel/bpf/
percpu_freelist.c
  15   struct pcpu_freelist_head *head = per_cpu_ptr(s->freelist, cpu);  in pcpu_freelist_init()
  58   head = per_cpu_ptr(s->freelist, cpu);  in __pcpu_freelist_push()
  89   head = per_cpu_ptr(s->freelist, cpu);  in pcpu_freelist_populate()
  107  head = per_cpu_ptr(s->freelist, cpu);  in ___pcpu_freelist_pop()
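
pcpu_freelist_init() walks every possible CPU once to give each freelist head its own lock and an empty list. A sketch of that init loop over hypothetical demo_* types (the real BPF freelist additionally keeps a shared extra list for contended contexts):

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/spinlock.h>

struct demo_flist_head {
	raw_spinlock_t	lock;
	void		*first;
};

static int demo_freelist_init(struct demo_flist_head __percpu **out)
{
	struct demo_flist_head __percpu *fl;
	int cpu;

	fl = alloc_percpu(struct demo_flist_head);
	if (!fl)
		return -ENOMEM;

	/* Give every CPU's head its own lock and an empty list. */
	for_each_possible_cpu(cpu) {
		struct demo_flist_head *head = per_cpu_ptr(fl, cpu);

		raw_spin_lock_init(&head->lock);
		head->first = NULL;
	}
	*out = fl;
	return 0;
}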
|
bpf_lru_list.c
  413  l = per_cpu_ptr(lru->percpu_lru, cpu);  in bpf_percpu_lru_pop_free()
  446  loc_l = per_cpu_ptr(clru->local_list, cpu);  in bpf_common_lru_pop_free()
  475  steal_loc_l = per_cpu_ptr(clru->local_list, steal);  in bpf_common_lru_pop_free()
  520  loc_l = per_cpu_ptr(lru->common_lru.local_list, node->cpu);  in bpf_common_lru_push_free()
  547  l = per_cpu_ptr(lru->percpu_lru, node->cpu);  in bpf_percpu_lru_push_free()
  597  l = per_cpu_ptr(lru->percpu_lru, cpu);  in bpf_percpu_lru_populate()
  664  l = per_cpu_ptr(lru->percpu_lru, cpu);  in bpf_lru_init()
  678  loc_l = per_cpu_ptr(clru->local_list, cpu);  in bpf_lru_init()
|
/linux-6.15/arch/x86/events/amd/
uncore.c
  223  ctx = *per_cpu_ptr(pmu->ctx, event->cpu);  in amd_uncore_event_init()
  426  ctx = *per_cpu_ptr(pmu->ctx, cpu);  in amd_uncore_ctx_free()
  438  *per_cpu_ptr(pmu->ctx, cpu) = NULL;  in amd_uncore_ctx_free()
  456  *per_cpu_ptr(pmu->ctx, cpu) = NULL;  in amd_uncore_ctx_init()
  468  prev = *per_cpu_ptr(pmu->ctx, j);  in amd_uncore_ctx_init()
  498  *per_cpu_ptr(pmu->ctx, cpu) = curr;  in amd_uncore_ctx_init()
  520  curr = *per_cpu_ptr(pmu->ctx, cpu);  in amd_uncore_ctx_move()
  526  next = *per_cpu_ptr(pmu->ctx, j);  in amd_uncore_ctx_move()
  653  *per_cpu_ptr(uncore->info, cpu) = info;  in amd_uncore_df_ctx_scan()
  787  *per_cpu_ptr(uncore->info, cpu) = info;  in amd_uncore_l3_ctx_scan()
  [all …]
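
amd uncore stores a context pointer per CPU but shares one context object among related CPUs, so many slots alias the same allocation; a reference count lets the last user free it. A simplified sketch of that share-by-pointer pattern (demo_* names, bare-int refcount for brevity; assume demo_ctx_slot came from alloc_percpu() and callers serialize attach/detach):

#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/slab.h>

struct demo_ctx {
	int	refcnt;
};

static struct demo_ctx * __percpu *demo_ctx_slot;

/* Point cpu at an existing shared context, or allocate a fresh one. */
static int demo_ctx_attach(unsigned int cpu, struct demo_ctx *shared)
{
	struct demo_ctx *ctx = shared;

	if (!ctx) {
		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;
	}
	ctx->refcnt++;
	*per_cpu_ptr(demo_ctx_slot, cpu) = ctx;
	return 0;
}

static void demo_ctx_detach(unsigned int cpu)
{
	struct demo_ctx *ctx = *per_cpu_ptr(demo_ctx_slot, cpu);

	/* The last CPU to detach frees the shared object. */
	if (ctx && --ctx->refcnt == 0)
		kfree(ctx);
	*per_cpu_ptr(demo_ctx_slot, cpu) = NULL;
}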
|
/linux-6.15/arch/s390/kernel/
smp.c
  405   if (pcpu_running(per_cpu_ptr(&pcpu_devices, cpu)))  in arch_vcpu_is_preempted()
  447   if (pcpu_stopped(per_cpu_ptr(&pcpu_devices, cpu)))  in smp_emergency_stop()
  478   pcpu = per_cpu_ptr(&pcpu_devices, cpu);  in smp_send_stop()
  552   pcpu = per_cpu_ptr(&pcpu_devices, cpu);  in smp_store_status()
  724   pcpu = per_cpu_ptr(&pcpu_devices, cpu);  in smp_add_core()
  922   pcpu = per_cpu_ptr(&pcpu_devices, cpu);  in __cpu_die()
  968   ipl_pcpu = per_cpu_ptr(&pcpu_devices, 0);  in smp_prepare_boot_cpu()
  1028  pcpu = per_cpu_ptr(&pcpu_devices, cpu);  in cpu_configure_store()
  1100  struct cpu *c = per_cpu_ptr(&cpu_devices, cpu);  in smp_cpu_online()
  1107  struct cpu *c = per_cpu_ptr(&cpu_devices, cpu);  in smp_cpu_pre_down()
  [all …]
|
wti.c
  104  struct wti_state *st = per_cpu_ptr(&wti_state, cpu);  in wti_pending()
  133  st = per_cpu_ptr(&wti_state, cpu);  in wti_show()
  144  struct wti_state *st = per_cpu_ptr(&wti_state, cpu);  in wti_thread_fn()
  179  st = per_cpu_ptr(&wti_state, cpu);  in wti_init()
|
/linux-6.15/drivers/irqchip/
irq-riscv-imsic-state.c
  190  mlocal = per_cpu_ptr(imsic->global.local, mvec->cpu);  in __imsic_local_sync()
  296  lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu);  in imsic_vector_mask()
  318  lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu);  in imsic_vector_unmask()
  342  lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu);  in imsic_vector_force_move_cleanup()
  387  old_lpriv = per_cpu_ptr(imsic->lpriv, old_vec->cpu);  in imsic_vector_move()
  391  new_lpriv = per_cpu_ptr(imsic->lpriv, new_vec->cpu);  in imsic_vector_move()
  412  lpriv = per_cpu_ptr(imsic->lpriv, vec->cpu);  in imsic_vector_debug_show()
  461  lpriv = per_cpu_ptr(imsic->lpriv, cpu);  in imsic_vector_alloc()
  487  lpriv = per_cpu_ptr(imsic->lpriv, cpu);  in imsic_local_cleanup()
  510  lpriv = per_cpu_ptr(imsic->lpriv, cpu);  in imsic_local_init()
  [all …]
|
irq-sifive-plic.c
  123  struct plic_handler *handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_irq_toggle()
  253  priv = per_cpu_ptr(&plic_handlers, smp_processor_id())->priv;  in plic_irq_suspend()
  261  struct plic_handler *handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_irq_suspend()
  284  priv = per_cpu_ptr(&plic_handlers, smp_processor_id())->priv;  in plic_irq_resume()
  293  struct plic_handler *handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_irq_resume()
  611  handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_probe()
  659  handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_probe()
  698  handler = per_cpu_ptr(&plic_handlers, cpu);  in plic_probe()
|
/linux-6.15/fs/squashfs/
decompressor_multi_percpu.c
  39  stream = per_cpu_ptr(percpu, cpu);  in squashfs_decompressor_create()
  53  stream = per_cpu_ptr(percpu, cpu);  in squashfs_decompressor_create()
  70  stream = per_cpu_ptr(percpu, cpu);  in squashfs_decompressor_destroy()
|
/linux-6.15/arch/x86/kernel/cpu/
aperfmperf.c
  383  per_cpu_ptr(arch_cpu_scale, cpu)->capacity = SCHED_CAPACITY_SCALE;  in arch_enable_hybrid_capacity_scale()
  384  per_cpu_ptr(arch_cpu_scale, cpu)->freq_ratio = arch_max_freq_ratio;  in arch_enable_hybrid_capacity_scale()
  414  WRITE_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->capacity,  in arch_set_cpu_capacity()
  416  WRITE_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->freq_ratio,  in arch_set_cpu_capacity()
  426  return READ_ONCE(per_cpu_ptr(arch_cpu_scale, cpu)->capacity);  in arch_scale_cpu_capacity()
  503  struct aperfmperf *s = per_cpu_ptr(&cpu_samples, cpu);  in arch_freq_get_on_cpu()
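
aperfmperf pairs per_cpu_ptr() with WRITE_ONCE()/READ_ONCE() so that readers can sample another CPU's capacity locklessly while it is being updated. A sketch of that publish/read pairing with a hypothetical demo_scale struct:

#include <linux/compiler.h>
#include <linux/percpu.h>

struct demo_scale {
	unsigned long	capacity;
	unsigned long	freq_ratio;
};

static struct demo_scale __percpu *demo_scale;	/* from alloc_percpu() */

static void demo_set_capacity(unsigned int cpu, unsigned long cap)
{
	/* Publish with WRITE_ONCE so lockless readers never see a torn value. */
	WRITE_ONCE(per_cpu_ptr(demo_scale, cpu)->capacity, cap);
}

static unsigned long demo_capacity_of(unsigned int cpu)
{
	/* May run on any CPU, concurrently with the writer above. */
	return READ_ONCE(per_cpu_ptr(demo_scale, cpu)->capacity);
}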
|
/linux-6.15/drivers/clocksource/
timer-mp-csky.c
  78   struct timer_of *to = per_cpu_ptr(&csky_to, cpu);  in csky_mptimer_starting_cpu()
  145  to = per_cpu_ptr(&csky_to, cpu);  in csky_mptimer_init()
  168  to = per_cpu_ptr(&csky_to, cpu_rollback);  in csky_mptimer_init()
|
/linux-6.15/drivers/powercap/
idle_inject.c
  108  iit = per_cpu_ptr(&idle_inject_thread, cpu);  in idle_inject_wakeup()
  154  iit = per_cpu_ptr(&idle_inject_thread, cpu);  in idle_inject_fn()
  280  iit = per_cpu_ptr(&idle_inject_thread, cpu);  in idle_inject_stop()
  311  per_cpu_ptr(&idle_inject_thread, cpu);  in idle_inject_should_run()
|
/linux-6.15/tools/testing/shared/linux/
percpu.h
  10  #define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); })  macro
  11  #define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))
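
This stub makes per_cpu_ptr() a no-op for userspace test harnesses: the cpu argument is evaluated (so side effects still happen) and then discarded, and every "CPU" aliases the single object. A small runnable check of that behaviour (plain C with GNU statement expressions, mirroring the stub above):

#include <assert.h>

/* The stub reproduced: evaluate cpu, return the lone instance. */
#define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); })
#define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))

static int counter;

int main(void)
{
	/* Every "CPU" aliases the single instance under the stub. */
	assert(per_cpu_ptr(&counter, 0) == per_cpu_ptr(&counter, 1));
	per_cpu(counter, 3) = 7;
	assert(counter == 7);
	return 0;
}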
|
/linux-6.15/arch/x86/kernel/
irq_64.c
  38  char *stack = (char *)per_cpu_ptr(&irq_stack_backing_store, cpu);  in map_irq_stack()
  64  void *va = per_cpu_ptr(&irq_stack_backing_store, cpu);  in map_irq_stack()
|
/linux-6.15/drivers/hwtracing/coresight/
coresight-trace-id.c
  51   return atomic_read(per_cpu_ptr(id_map->cpu_map, cpu));  in _coresight_trace_id_read_cpu_id()
  137  atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), 0);  in coresight_trace_id_release_all()
  171  atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), id);  in _coresight_trace_id_get_cpu_id()
  194  atomic_set(per_cpu_ptr(id_map->cpu_map, cpu), 0);  in _coresight_trace_id_put_cpu_id()
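
coresight keeps a per-CPU atomic_t trace ID, read and reset through per_cpu_ptr() with atomic_read()/atomic_set(). A sketch of the same shape with hypothetical demo_* names (assume the map came from alloc_percpu(atomic_t)):

#include <linux/atomic.h>
#include <linux/cpumask.h>
#include <linux/percpu.h>

static atomic_t __percpu *demo_id_map;	/* from alloc_percpu(atomic_t) */

static int demo_read_id(unsigned int cpu)
{
	return atomic_read(per_cpu_ptr(demo_id_map, cpu));
}

static void demo_release_all_ids(void)
{
	unsigned int cpu;

	/* Reset every CPU's slot; each atomic_set() is safe locklessly. */
	for_each_possible_cpu(cpu)
		atomic_set(per_cpu_ptr(demo_id_map, cpu), 0);
}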
|