
Searched refs:rcu_data (Results 1 – 10 of 10) sorted by relevance

/linux-6.15/kernel/rcu/
tree_nocb.h
194 struct rcu_data *rdp, in __wake_nocb_gp()
613 static void nocb_gp_toggle_rdp(struct rcu_data *rdp_gp, struct rcu_data *rdp) in nocb_gp_toggle_rdp()
864 struct rcu_data *rdp = arg; in rcu_nocb_gp_kthread()
1118 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_nocb_cpu_deoffload()
1187 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_nocb_cpu_offload()
1225 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in lazy_rcu_shrink_count()
1260 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in lazy_rcu_shrink_scan()
1298 struct rcu_data *rdp; in rcu_init_nohz()
1382 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_spawn_cpu_nocb_kthread()
1383 struct rcu_data *rdp_gp; in rcu_spawn_cpu_nocb_kthread()
[all …]
tree.h
178 struct rcu_data { struct
481 static bool rcu_is_callbacks_kthread(struct rcu_data *rdp);
486 static void zero_cpu_stall_ticks(struct rcu_data *rdp);
490 static bool wake_nocb_gp(struct rcu_data *rdp, bool force);
498 static bool do_nocb_deferred_wakeup(struct rcu_data *rdp);
499 static void rcu_boot_init_nocb_percpu_data(struct rcu_data *rdp);
501 static void show_rcu_nocb_state(struct rcu_data *rdp);
502 static void rcu_nocb_lock(struct rcu_data *rdp);
503 static void rcu_nocb_unlock(struct rcu_data *rdp);
504 static void rcu_nocb_unlock_irqrestore(struct rcu_data *rdp,
[all …]
tree_plugin.h
33 rdp == this_cpu_ptr(&rcu_data)) || in rcu_rdp_is_offloaded()
327 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_note_context_switch()
485 struct rcu_data *rdp; in rcu_preempt_deferred_qs_irqrestore()
495 rdp = this_cpu_ptr(&rcu_data); in rcu_preempt_deferred_qs_irqrestore()
627 struct rcu_data *rdp; in rcu_preempt_deferred_qs_handler()
653 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_read_unlock_special()
795 struct rcu_data *rdp; in dump_blkd_tasks()
834 struct rcu_data *rdp; in rcu_read_unlock_strict()
847 rdp = this_cpu_ptr(&rcu_data); in rcu_read_unlock_strict()
965 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_preempt_deferred_qs()
[all …]
tree.c
222 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_get_n_cbs_cpu()
585 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_irq_work_resched()
648 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in __rcu_irq_enter_check_tick()
1053 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_future_gp_cleanup()
2791 struct rcu_data *rdp = raw_cpu_ptr(&rcu_data); in rcu_core()
3624 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_pending()
3745 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_barrier_handler()
3958 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_cpu_online()
4090 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_boot_init_percpu_data()
4341 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcutree_report_cpu_dead()
[all …]
tree_exp.h
238 struct rcu_data *rdp; in rcu_report_exp_cpu_mult()
248 rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_report_exp_cpu_mult()
296 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id()); in exp_funnel_lock()
365 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in __sync_rcu_exp_select_node_cpus()
407 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in __sync_rcu_exp_select_node_cpus()
575 struct rcu_data *rdp; in synchronize_rcu_expedited_stall()
627 struct rcu_data *rdp; in synchronize_rcu_expedited_wait()
749 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_exp_handler()
872 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in rcu_exp_handler()
893 struct rcu_data *rdp; in sync_sched_exp_online_cleanup()
[all …]
tree_stall.h
193 struct rcu_data *rdp; in rcu_iw_handler()
196 rdp = container_of(iwp, struct rcu_data, rcu_iw); in rcu_iw_handler()
423 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in print_cpu_stat_info()
464 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in print_cpu_stall_info()
527 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_check_gp_kthread_starvation()
655 struct rcu_data *rdp = this_cpu_ptr(&rcu_data); in print_cpu_stall()
882 struct rcu_data *rdp; in show_rcu_gp_kthreads()
918 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
928 rdp = per_cpu_ptr(&rcu_data, cpu); in show_rcu_gp_kthreads()
1007 struct rcu_data *rdp; in rcu_fwd_progress_check()
[all …]
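
Taken together, the kernel/rcu hits above show the two standard ways code reaches the per-CPU rcu_data instance: this_cpu_ptr(&rcu_data) for the currently running CPU (as in rcu_note_context_switch(), __rcu_irq_enter_check_tick() and rcu_exp_handler()), and per_cpu_ptr(&rcu_data, cpu) when a specific or every CPU is targeted (as in rcu_nocb_cpu_deoffload(), rcu_boot_init_percpu_data() and print_cpu_stall_info()). A minimal sketch of that access pattern, using a hypothetical demo_pcpu structure in place of the real rcu_data (whose definition depends on the rest of the RCU tree), might look like:

#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/preempt.h>

/* Hypothetical stand-in for struct rcu_data; the real structure is
 * defined around line 178 of kernel/rcu/tree.h (see the tree.h hits). */
struct demo_pcpu {
	unsigned long events;
};

/* One instance per CPU, mirroring how tree.c defines rcu_data. */
static DEFINE_PER_CPU(struct demo_pcpu, demo_pcpu);

/* Local-CPU access: pin the CPU so "this CPU" cannot change underneath. */
static void demo_note_event(void)
{
	struct demo_pcpu *dp;

	preempt_disable();
	dp = this_cpu_ptr(&demo_pcpu);
	dp->events++;
	preempt_enable();
}

/* Cross-CPU access: visit each online CPU's instance; per_cpu_ptr() is how
 * rcu_barrier_handler() and print_cpu_stall_info() above reach a specific
 * CPU's rcu_data. */
static unsigned long demo_sum_events(void)
{
	unsigned long sum = 0;
	int cpu;

	for_each_online_cpu(cpu)
		sum += per_cpu_ptr(&demo_pcpu, cpu)->events;
	return sum;
}

The form per_cpu_ptr(&rcu_data, cpu) seen in the hits works because the per-CPU variable shares the name rcu_data with its structure, just as demo_pcpu does above.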
/linux-6.15/Documentation/RCU/Design/Data-Structures/
Data-Structures.rst
29 to 16 ``rcu_data`` structures associated with it, so that there are
42 Quiescent states are recorded by the per-CPU ``rcu_data`` structures,
155 ``rcu_node`` and ``rcu_data`` structures, tracks grace periods,
190 and ``rcu_data`` data structures.
206 Relationship to rcu_node and rcu_data Structures
215 3 struct rcu_data __percpu *rda;
260 corresponding CPU's ``rcu_data`` structure.
287 (down the tree from the root to the leaves) to ``rcu_data``.
434 thought of as having ``rcu_data`` structures as their children.
779 The ``rcu_data`` Structure
[all …]
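
The Data-Structures.rst hits also quote the rda field, a __percpu pointer through which the global RCU state reaches each CPU's rcu_data. As a rough, hedged sketch of how such a __percpu pointer field is declared and dereferenced (hypothetical demo_* names; the real wiring lives in kernel/rcu/tree.c and tree.h):

#include <linux/percpu.h>

struct demo_data {
	unsigned long gp_seq;
};

static DEFINE_PER_CPU(struct demo_data, demo_data);

/* Global state carrying a __percpu pointer, analogous to the
 * "struct rcu_data __percpu *rda;" line quoted above. */
struct demo_state {
	struct demo_data __percpu *rda;
};

static struct demo_state demo_state = {
	.rda = &demo_data,
};

/* Resolve CPU @cpu's instance through the global state's __percpu pointer. */
static struct demo_data *demo_state_to_data(int cpu)
{
	return per_cpu_ptr(demo_state.rda, cpu);
}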
/linux-6.15/Documentation/RCU/
rcubarrier.rst
238 4 struct rcu_data *rdp = &per_cpu(rcu_data, cpu);
246 Lines 3 and 4 locate RCU's internal per-CPU rcu_data structure,
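
The rcubarrier.rst example reaches the structure with &per_cpu(rcu_data, cpu) rather than the per_cpu_ptr(&rcu_data, cpu) form seen in the tree.c hits; the two denote the same per-CPU slot, per_cpu() being an lvalue and per_cpu_ptr() a pointer. A tiny sketch with a hypothetical demo_counter variable:

#include <linux/percpu.h>
#include <linux/types.h>

static DEFINE_PER_CPU(unsigned long, demo_counter);

/* per_cpu() yields CPU @cpu's variable as an lvalue; per_cpu_ptr()
 * returns a pointer to it.  Both name the same storage. */
static bool demo_same_slot(int cpu)
{
	return &per_cpu(demo_counter, cpu) == per_cpu_ptr(&demo_counter, cpu);
}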
/linux-6.15/Documentation/RCU/Design/Memory-Ordering/
Tree-RCU-Memory-Ordering.rst
205 4 struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
/linux-6.15/tools/memory-model/Documentation/
simple.txt
127 within its instance of the per-CPU rcu_data structure, and then uses data