Lines matching refs: acpi_processor_cx
59 DEFINE_PER_CPU(struct acpi_processor_cx * [CPUIDLE_STATE_MAX], acpi_cstate);
125 struct acpi_processor_cx *cx) in lapic_timer_check_state()
165 struct acpi_processor_cx *cx) in lapic_timer_needs_broadcast()
173 struct acpi_processor_cx *cstate) { } in lapic_timer_check_state()
177 struct acpi_processor_cx *cx) in lapic_timer_needs_broadcast()
314 struct acpi_processor_cx *cx) in acpi_processor_power_verify_c3()
392 static void acpi_cst_latency_sort(struct acpi_processor_cx *states, size_t length) in acpi_cst_latency_sort()
423 struct acpi_processor_cx *cx = &pr->power.states[i]; in acpi_processor_power_verify()
564 static void __cpuidle acpi_idle_do_entry(struct acpi_processor_cx *cx) in acpi_idle_do_entry()
587 struct acpi_processor_cx *cx = per_cpu(acpi_cstate[index], dev->cpu); in acpi_idle_play_dead()
622 struct acpi_processor_cx *cx, in acpi_idle_enter_bm()
625 static struct acpi_processor_cx safe_cx = { in acpi_idle_enter_bm()
684 struct acpi_processor_cx *cx = per_cpu(acpi_cstate[index], dev->cpu); in acpi_idle_enter()
713 struct acpi_processor_cx *cx = per_cpu(acpi_cstate[index], dev->cpu); in acpi_idle_enter_s2idle()
743 struct acpi_processor_cx *cx; in acpi_processor_setup_cpuidle_cx()
781 struct acpi_processor_cx *cx; in acpi_processor_setup_cstates()
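
Most of the matches above belong to one pattern: line 59 defines a per-CPU array of struct acpi_processor_cx pointers keyed by cpuidle state index, the setup path (acpi_processor_setup_cpuidle_cx) fills it in, and the idle-entry paths (acpi_idle_play_dead, acpi_idle_enter, acpi_idle_enter_s2idle) read it back with per_cpu(acpi_cstate[index], dev->cpu). A minimal sketch of that pattern follows; the helpers record_cstate() and lookup_cstate() are made-up names standing in for the real setup and entry functions, which carry far more logic.

#include <linux/percpu.h>
#include <linux/cpuidle.h>
#include <acpi/processor.h>	/* struct acpi_processor_cx */

/* Per-CPU map from cpuidle state index to its ACPI C-state data
 * (mirrors the definition at line 59 above). */
static DEFINE_PER_CPU(struct acpi_processor_cx * [CPUIDLE_STATE_MAX], acpi_cstate);

/* Setup side (cf. acpi_processor_setup_cpuidle_cx): remember which ACPI
 * C-state backs cpuidle state 'index' on this CPU. */
static void record_cstate(struct cpuidle_device *dev, int index,
			  struct acpi_processor_cx *cx)
{
	per_cpu(acpi_cstate[index], dev->cpu) = cx;
}

/* Idle-entry side (cf. acpi_idle_enter and friends): map the
 * governor-chosen index back to the C-state for the current CPU. */
static struct acpi_processor_cx *lookup_cstate(struct cpuidle_device *dev,
					       int index)
{
	return per_cpu(acpi_cstate[index], dev->cpu);
}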
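
The duplicate lapic_timer_check_state()/lapic_timer_needs_broadcast() hits (lines 125/173 and 165/177) are the usual config split: real implementations when the local APIC timer can stop in deep C-states, empty stubs otherwise. A hedged sketch of that split, assuming the CONFIG_ARCH_APICTIMER_STOPS_ON_C3 guard used on x86; the bodies here are placeholders, not the kernel's logic.

#ifdef CONFIG_ARCH_APICTIMER_STOPS_ON_C3
static void lapic_timer_check_state(int state, struct acpi_processor *pr,
				    struct acpi_processor_cx *cx)
{
	/* flag this C-state if the LAPIC timer stops in it, so the
	 * broadcast timer can take over (real checks omitted) */
}

static bool lapic_timer_needs_broadcast(struct acpi_processor *pr,
					struct acpi_processor_cx *cx)
{
	/* decide whether entering cx requires the broadcast timer */
	return false;	/* placeholder */
}
#else
static void lapic_timer_check_state(int state, struct acpi_processor *pr,
				    struct acpi_processor_cx *cstate) { }

static bool lapic_timer_needs_broadcast(struct acpi_processor *pr,
					struct acpi_processor_cx *cx)
{
	return false;
}
#endif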