
Searched refs: node_mask (Results 1 – 8 of 8) sorted by relevance

/linux-6.15/tools/perf/util/
mmap.c
101 unsigned long *node_mask; in perf_mmap__aio_bind() local
109 node_mask = bitmap_zalloc(node_index + 1); in perf_mmap__aio_bind()
110 if (!node_mask) { in perf_mmap__aio_bind()
114 __set_bit(node_index, node_mask); in perf_mmap__aio_bind()
115 if (mbind(data, mmap_len, MPOL_BIND, node_mask, node_index + 1 + 1, 0)) { in perf_mmap__aio_bind()
120 bitmap_free(node_mask); in perf_mmap__aio_bind()
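A minimal userspace sketch of the pattern above, assuming a system with <numaif.h> available (link with -lnuma): allocate a zeroed bit mask wide enough for the target node, set that node's bit, and hand the mask to mbind() so the mapping's pages are bound to the node. The helper name and error handling below are illustrative, not part of the perf code.

/* Illustrative helper, not taken from the kernel tree. */
#include <numaif.h>	/* mbind(), MPOL_BIND */
#include <stdio.h>
#include <stdlib.h>

static int bind_mapping_to_node(void *data, size_t len, unsigned long node)
{
	/* mbind() wants maxnode to cover at least one bit past the node. */
	unsigned long maxnode = node + 2;
	unsigned long bits_per_long = 8 * sizeof(unsigned long);
	unsigned long nr_words = (maxnode + bits_per_long - 1) / bits_per_long;
	unsigned long *node_mask = calloc(nr_words, sizeof(unsigned long));

	if (!node_mask)
		return -1;

	/* Set only the bit for the requested node. */
	node_mask[node / bits_per_long] |= 1UL << (node % bits_per_long);

	if (mbind(data, len, MPOL_BIND, node_mask, maxnode, 0)) {
		perror("mbind");
		free(node_mask);
		return -1;
	}

	free(node_mask);
	return 0;
}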
/linux-6.15/net/netfilter/
xt_cluster.c
118 return !!((1 << hash) & info->node_mask) ^ in xt_cluster_mt()
132 if (info->node_mask >= (1ULL << info->total_nodes)) { in xt_cluster_mt_checkentry()
/linux-6.15/include/uapi/linux/netfilter/
xt_cluster.h
13 __u32 node_mask; member
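The two xt_cluster hits boil down to a bit test: node_mask is a bitmap of cluster node IDs this machine is responsible for, and a packet matches when the bit selected by the flow hash is set, optionally inverted. A standalone sketch of that logic follows; hash and invert stand in for values the real match derives from the packet and the match options.

#include <stdbool.h>
#include <stdint.h>

/* Packet matches if the hash'th bit of node_mask is set, XORed with invert. */
static bool cluster_is_mine(uint32_t hash, uint32_t node_mask, bool invert)
{
	/* hash has already been reduced modulo total_nodes. */
	return !!((1u << hash) & node_mask) ^ invert;
}

/* checkentry-style sanity test: the mask may only name nodes below total_nodes. */
static bool cluster_mask_valid(uint32_t node_mask, uint32_t total_nodes)
{
	return node_mask < (1ull << total_nodes);
}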
/linux-6.15/tools/perf/bench/
numa.c
396 struct bitmask *node_mask; in bind_to_memnode() local
402 node_mask = numa_allocate_nodemask(); in bind_to_memnode()
403 BUG_ON(!node_mask); in bind_to_memnode()
405 numa_bitmask_clearall(node_mask); in bind_to_memnode()
406 numa_bitmask_setbit(node_mask, node); in bind_to_memnode()
408 ret = set_mempolicy(MPOL_BIND, node_mask->maskp, node_mask->size + 1); in bind_to_memnode()
409 dprintf("binding to node %d, mask: %016lx => %d\n", node, *node_mask->maskp, ret); in bind_to_memnode()
411 numa_bitmask_free(node_mask); in bind_to_memnode()
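The same binding can be done from userspace with libnuma, which is what perf bench numa does above. A small sketch assuming libnuma is installed (numa.h, numaif.h, link with -lnuma); only the wrapper function itself is invented, the API calls are the ones shown in the hits.

#include <numa.h>	/* numa_allocate_nodemask() and friends */
#include <numaif.h>	/* set_mempolicy(), MPOL_BIND */
#include <stdio.h>

static int bind_task_to_memnode(int node)
{
	struct bitmask *node_mask;
	int ret;

	if (numa_available() < 0)
		return -1;

	node_mask = numa_allocate_nodemask();
	if (!node_mask)
		return -1;

	numa_bitmask_clearall(node_mask);
	numa_bitmask_setbit(node_mask, node);

	/* As in bind_to_memnode() above, maxnode is the mask size plus one. */
	ret = set_mempolicy(MPOL_BIND, node_mask->maskp, node_mask->size + 1);
	if (ret)
		perror("set_mempolicy");

	numa_bitmask_free(node_mask);
	return ret;
}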
/linux-6.15/drivers/scsi/
storvsc_drv.c
1398 const struct cpumask *node_mask; in get_og_chn() local
1415 node_mask = cpumask_of_node(cpu_to_node(q_num)); in get_og_chn()
1419 if (cpumask_test_cpu(tgt_cpu, node_mask)) in get_og_chn()
1432 if (!cpumask_test_cpu(tgt_cpu, node_mask)) in get_og_chn()
1453 const struct cpumask *node_mask; in storvsc_do_io() local
1475 node_mask = cpumask_of_node(cpu_to_node(q_num)); in storvsc_do_io()
1478 if (!cpumask_test_cpu(tgt_cpu, node_mask)) in storvsc_do_io()
1509 if (cpumask_test_cpu(tgt_cpu, node_mask)) in storvsc_do_io()
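The storvsc hits prefer target CPUs on the same NUMA node as the submitting queue. A kernel-style sketch of that selection, not a standalone program; pick_channel_cpu() and its arguments are invented for illustration.

#include <linux/cpumask.h>
#include <linux/topology.h>

static int pick_channel_cpu(int q_num, const struct cpumask *candidates)
{
	const struct cpumask *node_mask = cpumask_of_node(cpu_to_node(q_num));
	int tgt_cpu;

	/* First pass: stay on the NUMA node of the CPU that owns the queue. */
	for_each_cpu(tgt_cpu, candidates) {
		if (cpumask_test_cpu(tgt_cpu, node_mask))
			return tgt_cpu;
	}

	/* No node-local candidate: fall back to the first candidate CPU. */
	return cpumask_first(candidates);
}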
/linux-6.15/drivers/infiniband/hw/hfi1/
affinity.c
999 const struct cpumask *node_mask, in hfi1_get_proc_affinity() local
1110 node_mask = cpumask_of_node(node); in hfi1_get_proc_affinity()
1112 cpumask_pr_args(node_mask)); in hfi1_get_proc_affinity()
1115 cpumask_and(available_mask, hw_thread_mask, node_mask); in hfi1_get_proc_affinity()
1144 cpumask_andnot(available_mask, available_mask, node_mask); in hfi1_get_proc_affinity()
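The affinity.c hits combine two cpumask operations: restrict the available hardware threads to the device's NUMA node, and if that leaves nothing, retry with node-local CPUs excluded. A kernel-style sketch under that reading; the helper and its signature are invented.

#include <linux/cpumask.h>
#include <linux/topology.h>

static void build_affinity_mask(int node,
				const struct cpumask *hw_thread_mask,
				struct cpumask *available_mask)
{
	const struct cpumask *node_mask = cpumask_of_node(node);

	/* Prefer hardware threads on the device's own node ... */
	cpumask_and(available_mask, hw_thread_mask, node_mask);

	/* ... and fall back to off-node threads only if the node is exhausted. */
	if (cpumask_empty(available_mask))
		cpumask_andnot(available_mask, hw_thread_mask, node_mask);
}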
/linux-6.15/kernel/sched/
ext_idle.c
713 const struct cpumask *node_mask = cpumask_of_node(node); in reset_idle_masks() local
715 cpumask_and(idle_cpumask(node)->cpu, cpu_online_mask, node_mask); in reset_idle_masks()
716 cpumask_and(idle_cpumask(node)->smt, cpu_online_mask, node_mask); in reset_idle_masks()
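reset_idle_masks() simply seeds the per-node idle masks with the online CPUs of each node. A kernel-style sketch; struct node_idle_masks below stands in for the scheduler's real per-node state.

#include <linux/cpumask.h>
#include <linux/topology.h>

struct node_idle_masks {
	struct cpumask cpu;	/* CPUs on this node considered idle */
	struct cpumask smt;	/* fully idle SMT cores on this node */
};

static void reset_node_idle_masks(int node, struct node_idle_masks *m)
{
	const struct cpumask *node_mask = cpumask_of_node(node);

	/* At reset, treat every online CPU of the node as idle. */
	cpumask_and(&m->cpu, cpu_online_mask, node_mask);
	cpumask_and(&m->smt, cpu_online_mask, node_mask);
}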
/linux-6.15/drivers/block/mtip32xx/
mtip32xx.c
3584 const struct cpumask *node_mask; in get_least_used_cpu_on_node() local
3586 node_mask = cpumask_of_node(node); in get_least_used_cpu_on_node()
3587 least_used_cpu = cpumask_first(node_mask); in get_least_used_cpu_on_node()
3591 for_each_cpu(cpu, node_mask) { in get_least_used_cpu_on_node()
3687 const struct cpumask *node_mask; in mtip_pci_probe() local
3741 node_mask = cpumask_of_node(dd->numa_node); in mtip_pci_probe()
3742 if (!cpumask_empty(node_mask)) { in mtip_pci_probe()
3743 for_each_cpu(cpu, node_mask) in mtip_pci_probe()
3751 topology_physical_package_id(cpumask_first(node_mask)), in mtip_pci_probe()
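get_least_used_cpu_on_node() walks the node's cpumask and keeps the CPU with the lowest usage count; mtip_pci_probe() reuses the same mask to pick a node-local CPU for interrupt affinity. A kernel-style sketch of the first routine, with cpu_use[] standing in for the driver's per-CPU bookkeeping.

#include <linux/cpumask.h>
#include <linux/topology.h>

static int least_used_cpu_on_node(int node, const int *cpu_use)
{
	const struct cpumask *node_mask = cpumask_of_node(node);
	int cpu, least_used_cpu = cpumask_first(node_mask);

	/* Keep whichever CPU of the node has the fewest current users. */
	for_each_cpu(cpu, node_mask) {
		if (cpu_use[cpu] < cpu_use[least_used_cpu])
			least_used_cpu = cpu;
	}

	return least_used_cpu;
}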