Searched refs:cpu_cnt (Results 1 – 12 of 12) sorted by relevance
/linux-6.15/block/
blk-cgroup-rwstat.h
      27  struct percpu_counter cpu_cnt[BLKG_RWSTAT_NR];  member
      39  percpu_counter_sum_positive(&rwstat->cpu_cnt[idx]);  in blkg_rwstat_read_counter()
      67  cnt = &rwstat->cpu_cnt[BLKG_RWSTAT_DISCARD];  in blkg_rwstat_add()
      69  cnt = &rwstat->cpu_cnt[BLKG_RWSTAT_WRITE];  in blkg_rwstat_add()
      71  cnt = &rwstat->cpu_cnt[BLKG_RWSTAT_READ];  in blkg_rwstat_add()
      76  cnt = &rwstat->cpu_cnt[BLKG_RWSTAT_SYNC];  in blkg_rwstat_add()
      78  cnt = &rwstat->cpu_cnt[BLKG_RWSTAT_ASYNC];  in blkg_rwstat_add()
      97  percpu_counter_sum_positive(&rwstat->cpu_cnt[i]);  in blkg_rwstat_read()
     125  percpu_counter_set(&rwstat->cpu_cnt[i], 0);  in blkg_rwstat_reset()
     144  sum[i] = percpu_counter_sum_positive(&from->cpu_cnt[i]);  in blkg_rwstat_add_aux()

blk-cgroup-rwstat.c
      12  ret = percpu_counter_init_many(rwstat->cpu_cnt, 0, gfp, BLKG_RWSTAT_NR);  in blkg_rwstat_init()
      24  percpu_counter_destroy_many(rwstat->cpu_cnt, BLKG_RWSTAT_NR);  in blkg_rwstat_exit()
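These blk-cgroup hits show the layout: one percpu_counter per BLKG_RWSTAT_* category, bumped on the I/O path in blkg_rwstat_add() and folded into a single value only when read or reset. Below is a minimal userspace sketch of the same idea; it uses plain per-CPU array slots in place of the kernel's percpu_counter API, and NCPU plus the category names are illustrative, not kernel definitions.

```c
/* Userspace illustration of the blkg_rwstat layout: per-CPU counters
 * indexed by I/O category, summed only when a reader asks. */
#include <stdint.h>
#include <stdio.h>

enum { RWSTAT_READ, RWSTAT_WRITE, RWSTAT_DISCARD, RWSTAT_NR };
#define NCPU 4	/* assumed CPU count for the sketch */

struct rwstat {
	/* one slot per category per CPU; the kernel uses struct percpu_counter */
	int64_t cnt[RWSTAT_NR][NCPU];
};

/* hot path: bump only this CPU's slot for the chosen category */
static void rwstat_add(struct rwstat *s, int cat, int cpu, int64_t val)
{
	s->cnt[cat][cpu] += val;
}

/* slow path: fold all CPU slots into one value, clamping at zero
 * like percpu_counter_sum_positive() */
static int64_t rwstat_read(const struct rwstat *s, int cat)
{
	int64_t sum = 0;
	for (int cpu = 0; cpu < NCPU; cpu++)
		sum += s->cnt[cat][cpu];
	return sum > 0 ? sum : 0;
}

int main(void)
{
	struct rwstat s = {0};
	rwstat_add(&s, RWSTAT_WRITE, 1, 4096);
	rwstat_add(&s, RWSTAT_WRITE, 3, 4096);
	printf("writes: %lld\n", (long long)rwstat_read(&s, RWSTAT_WRITE));
	return 0;
}
```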
bfq-cgroup.c
      23  ret = percpu_counter_init(&stat->cpu_cnt, 0, gfp);  in bfq_stat_init()
      33  percpu_counter_destroy(&stat->cpu_cnt);  in bfq_stat_exit()
      46  percpu_counter_add_batch(&stat->cpu_cnt, val, BLKG_STAT_CPU_BATCH);  in bfq_stat_add()
      55  return percpu_counter_sum_positive(&stat->cpu_cnt);  in bfq_stat_read()
      64  percpu_counter_set(&stat->cpu_cnt, 0);  in bfq_stat_reset()

bfq-iosched.h
     916  struct percpu_counter cpu_cnt;  member
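bfq keeps a single percpu_counter per bfq_stat and adds to it with an explicit batch (BLKG_STAT_CPU_BATCH), so a CPU's local delta is only folded into the shared count once it grows past the batch. A rough userspace sketch of that batching idea, assuming a fixed four-CPU layout and a made-up BATCH value:

```c
/* Batched per-CPU counting: each CPU accumulates a local delta and only
 * spills into the shared total once the delta exceeds the batch, roughly
 * what percpu_counter_add_batch() does. */
#include <stdint.h>
#include <stdio.h>

#define BATCH 32	/* stand-in for BLKG_STAT_CPU_BATCH */

struct pc {
	int64_t shared;		/* value every CPU agrees on */
	int64_t local[4];	/* per-CPU deltas not yet folded in */
};

static void pc_add_batch(struct pc *c, int cpu, int64_t val)
{
	c->local[cpu] += val;
	if (c->local[cpu] >= BATCH || c->local[cpu] <= -BATCH) {
		c->shared += c->local[cpu];	/* the kernel serializes this step */
		c->local[cpu] = 0;
	}
}

/* exact read: shared value plus every CPU's outstanding delta */
static int64_t pc_sum(const struct pc *c)
{
	int64_t sum = c->shared;
	for (int cpu = 0; cpu < 4; cpu++)
		sum += c->local[cpu];
	return sum;
}

int main(void)
{
	struct pc c = {0};
	for (int i = 0; i < 100; i++)
		pc_add_batch(&c, i % 4, 1);
	printf("total: %lld\n", (long long)pc_sum(&c));
	return 0;
}
```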
/linux-6.15/tools/testing/selftests/bpf/prog_tests/
get_branch_snapshot.c
       7  static int cpu_cnt;  variable
      46  cpu_cnt = libbpf_num_possible_cpus();  in create_perf_events()
      47  pfd_array = malloc(sizeof(int) * cpu_cnt);  in create_perf_events()
      49  cpu_cnt = 0;  in create_perf_events()
      53  for (cpu = 0; cpu < cpu_cnt; cpu++) {  in create_perf_events()
      67  for (cpu = 0; cpu < cpu_cnt; cpu++) {  in close_perf_events()
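The selftest sizes its fd array with libbpf_num_possible_cpus() and opens one perf event per CPU. A hedged sketch of that allocation pattern follows; it links against libbpf, the perf_event_attr setup is a placeholder rather than the selftest's branch-snapshot config, and error handling is trimmed.

```c
#include <stdlib.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/perf_event.h>
#include <bpf/libbpf.h>

static int cpu_cnt;
static int *pfd_array;

/* open one placeholder software event per possible CPU,
 * mirroring create_perf_events() in the selftest */
static int create_perf_events(void)
{
	struct perf_event_attr attr = {
		.type = PERF_TYPE_SOFTWARE,		/* placeholder event type */
		.config = PERF_COUNT_SW_CPU_CLOCK,
		.size = sizeof(attr),
	};
	int cpu;

	cpu_cnt = libbpf_num_possible_cpus();
	if (cpu_cnt < 0)
		return cpu_cnt;

	pfd_array = malloc(sizeof(int) * cpu_cnt);
	if (!pfd_array) {
		cpu_cnt = 0;
		return -1;
	}

	for (cpu = 0; cpu < cpu_cnt; cpu++)
		pfd_array[cpu] = syscall(__NR_perf_event_open, &attr,
					 -1 /* any pid */, cpu, -1, 0);
	return 0;
}

static void close_perf_events(void)
{
	for (int cpu = 0; cpu < cpu_cnt; cpu++)
		if (pfd_array[cpu] >= 0)
			close(pfd_array[cpu]);
	free(pfd_array);
}

int main(void)
{
	if (create_perf_events())
		return 1;
	close_perf_events();
	return 0;
}
```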
/linux-6.15/tools/testing/selftests/bpf/benchs/
bench_ringbufs.c
     470  int cpu_cnt; /* number of allocated CPU buffers */  member
     491  cnt = epoll_wait(pb->epoll_fd, pb->events, pb->cpu_cnt, -1);  in perfbuf_custom_consumer()
/linux-6.15/tools/bpf/bpftool/
map_perf_ring.c
     186  opts.cpu_cnt = ctx.all_cpus ? 0 : 1;  in do_event_pipe()
/linux-6.15/tools/power/x86/intel-speed-select/
isst-config.c
     684  static int cpu_cnt[MAX_PACKAGE_COUNT][MAX_DIE_PER_PACKAGE][MAX_PUNIT_PER_DIE];  variable
     709  return cpu_cnt[id->pkg][id->die][id->punit];  in get_cpu_count()
     818  cpu_cnt[pkg_id][die_id][punit_id]++;  in create_cpu_map()
     848  cpu_set_t *core_cpumask, int *cpu_cnt)  in set_cpu_mask_from_punit_coremask() argument
     855  *cpu_cnt = 0;  in set_cpu_mask_from_punit_coremask()
     875  *cpu_cnt = cnt;  in set_cpu_mask_from_punit_coremask()

isst.h
     233  int *cpu_cnt);
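isst-config keeps a three-dimensional per package/die/punit tally and increments it while building its CPU map. A simplified sketch of that bookkeeping, with made-up MAX_* bounds and an in-memory topology table standing in for the tool's sysfs/MSR discovery:

```c
#include <stdio.h>

/* illustrative bounds, standing in for MAX_PACKAGE_COUNT etc. */
#define MAX_PKG 8
#define MAX_DIE 2
#define MAX_PUNIT 4

static int cpu_cnt[MAX_PKG][MAX_DIE][MAX_PUNIT];

struct cpu_topo { int cpu, pkg, die, punit; };

/* create_cpu_map()-style pass: bump the counter for each CPU's location */
static void count_cpus(const struct cpu_topo *topo, int n)
{
	for (int i = 0; i < n; i++)
		cpu_cnt[topo[i].pkg][topo[i].die][topo[i].punit]++;
}

/* get_cpu_count()-style lookup */
static int get_cpu_count(int pkg, int die, int punit)
{
	return cpu_cnt[pkg][die][punit];
}

int main(void)
{
	struct cpu_topo topo[] = {
		{0, 0, 0, 0}, {1, 0, 0, 0}, {2, 1, 0, 0},	/* made-up layout */
	};
	count_cpus(topo, 3);
	printf("package 0 die 0 punit 0: %d CPUs\n", get_cpu_count(0, 0, 0));
	return 0;
}
```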
/linux-6.15/tools/lib/bpf/
libbpf.c
   13229  int cpu_cnt;  member
   13401  p.cpu_cnt = OPTS_GET(opts, cpu_cnt, 0);  in perf_buffer__new_raw()
   13469  if (p->cpu_cnt > 0) {  in __perf_buffer__new()
   13470  pb->cpu_cnt = p->cpu_cnt;  in __perf_buffer__new()
   13473  if (pb->cpu_cnt < 0) {  in __perf_buffer__new()
   13474  err = pb->cpu_cnt;  in __perf_buffer__new()
   13543  pb->cpu_cnt = j;  in __perf_buffer__new()
   13645  return pb->cpu_cnt;  in perf_buffer__buffer_cnt()
   13657  if (buf_idx >= pb->cpu_cnt)  in perf_buffer__buffer_fd()
   13671  if (buf_idx >= pb->cpu_cnt)  in perf_buffer__buffer()
          [all …]

libbpf.h
    1594  int cpu_cnt;  member
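In libbpf, perf_buffer_raw_opts.cpu_cnt selects how many per-CPU rings to map: 0 (the default) means one ring per online CPU, while a positive count pairs the caller's cpus[] and map_keys[] arrays, which is how bpftool's map_perf_ring picks between all CPUs and a single one. A sketch of that option, assuming the libbpf v0.7+ signature of perf_buffer__new_raw() and an already-created BPF_MAP_TYPE_PERF_EVENT_ARRAY fd passed in as map_fd:

```c
#include <stdbool.h>
#include <stdio.h>
#include <linux/perf_event.h>
#include <bpf/libbpf.h>

static void handle_event(void *ctx, int cpu, struct perf_event_header *event)
{
	/* consume one raw record from the per-CPU ring for `cpu` */
}

/* attach a perf buffer to an existing PERF_EVENT_ARRAY map fd;
 * one_cpu_only mirrors bpftool's ctx.all_cpus ? 0 : 1 choice */
static struct perf_buffer *open_perf_buf(int map_fd, bool one_cpu_only)
{
	struct perf_event_attr attr = {
		.type = PERF_TYPE_SOFTWARE,
		.config = PERF_COUNT_SW_BPF_OUTPUT,
		.sample_type = PERF_SAMPLE_RAW,
	};
	int cpu0 = 0, key0 = 0;
	LIBBPF_OPTS(perf_buffer_raw_opts, opts,
		/* cpu_cnt == 0: one ring per online CPU;
		 * cpu_cnt == 1 with .cpus/.map_keys: only CPU 0 */
		.cpu_cnt = one_cpu_only ? 1 : 0,
		.cpus = one_cpu_only ? &cpu0 : NULL,
		.map_keys = one_cpu_only ? &key0 : NULL,
	);
	struct perf_buffer *pb;

	pb = perf_buffer__new_raw(map_fd, 64 /* pages per ring */, &attr,
				  handle_event, NULL, &opts);
	if (!pb)
		return NULL;

	/* perf_buffer__buffer_cnt() reports how many per-CPU rings exist */
	printf("allocated %zu per-CPU buffers\n", perf_buffer__buffer_cnt(pb));
	return pb;
}
```

After setup, perf_buffer__buffer_cnt() and perf_buffer__buffer_fd() index into those pb->cpu_cnt rings, as the libbpf.c hits above show.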
/linux-6.15/drivers/scsi/lpfc/
lpfc_init.c
   12992  unsigned int cpu = 0, cpu_cnt = 0, cpu_select = nr_cpu_ids;  in lpfc_sli4_enable_msix() local
   13005  cpu_cnt = cpumask_weight(aff_mask);  in lpfc_sli4_enable_msix()
   13006  vectors = min(phba->cfg_irq_chann, cpu_cnt);  in lpfc_sli4_enable_msix()
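lpfc caps its MSI-X vector count at the number of CPUs in the device's affinity mask (cpumask_weight(aff_mask)). The same clamp-to-available-CPUs idea in a userspace sketch, with the process affinity mask standing in for the device's aff_mask:

```c
#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>

/* pick no more queues than there are CPUs we may actually run on,
 * analogous to vectors = min(cfg_irq_chann, cpumask_weight(aff_mask)) */
static int clamp_queue_count(int requested)
{
	cpu_set_t mask;
	int cpu_cnt;

	if (sched_getaffinity(0, sizeof(mask), &mask))
		return requested;	/* fall back if the mask is unavailable */

	cpu_cnt = CPU_COUNT(&mask);
	return requested < cpu_cnt ? requested : cpu_cnt;
}

int main(void)
{
	printf("queues: %d\n", clamp_queue_count(16));
	return 0;
}
```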