| /xnu-11215/osfmk/i386/ |
| H A D | cpu.c | 95 cpu_data_t *cdp = current_cpu_datap(); in cpu_sleep() 106 cpu_data_t *cdp = current_cpu_datap(); in cpu_init() 185 cpu_data_t *cdp = current_cpu_datap(); in cpu_machine_init() 201 return current_cpu_datap()->cpu_processor; in current_processor() 214 return &current_cpu_datap()->cpu_pending_ast; in ast_pending() 241 return current_cpu_datap()->cpu_type; in cpu_type() 247 return current_cpu_datap()->cpu_subtype; in cpu_subtype() 253 return current_cpu_datap()->cpu_threadtype; in cpu_threadtype()
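The cpu.c hits above all follow the same idiom: fetch the per-CPU pointer once (often into a local `cdp`) and read fields such as cpu_type or cpu_subtype from it. A minimal user-space sketch of that idiom follows; the struct fields and the static stand-in for current_cpu_datap() are illustrative, not the real XNU definitions.

    #include <stdio.h>

    /* Illustrative stand-in for the per-CPU record; the field names mirror
     * the usages above (cpu_type, cpu_subtype), not the full XNU layout. */
    typedef struct cpu_data {
        int cpu_type;
        int cpu_subtype;
    } cpu_data_t;

    static cpu_data_t fake_cpu = { .cpu_type = 7, .cpu_subtype = 8 };

    /* Stand-in for current_cpu_datap(): returns the calling CPU's record. */
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    /* The idiom from cpu.c: fetch the pointer, then read a field. */
    int cpu_type(void)    { return current_cpu_datap()->cpu_type; }
    int cpu_subtype(void) { return current_cpu_datap()->cpu_subtype; }

    int main(void) {
        printf("type=%d subtype=%d\n", cpu_type(), cpu_subtype());
        return 0;
    }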
|
| H A D | mp_desc.h | 95 #define current_gdt() (current_cpu_datap()->cpu_desc_index.cdi_gdtb.ptr) 96 #define current_idt() (current_cpu_datap()->cpu_desc_index.cdi_idtb.ptr) 97 #define current_ldt() (current_cpu_datap()->cpu_desc_index.cdi_ldtb) 98 #define current_ktss() (current_cpu_datap()->cpu_desc_index.cdi_ktssb) 99 #define current_sstk() (current_cpu_datap()->cpu_desc_index.cdi_sstkb)
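mp_desc.h layers one accessor macro per descriptor table over current_cpu_datap(). Below is a sketch of the same macro shape, assuming a simplified descriptor-index block (the real cdi_* entries carry descriptor base/limit state; they are reduced to bare pointers here):

    #include <stdio.h>

    /* Mocked descriptor-index block, far simpler than the XNU original. */
    typedef struct cpu_desc_index {
        void *cdi_gdtb_ptr;
        void *cdi_idtb_ptr;
    } cpu_desc_index_t;

    typedef struct cpu_data {
        cpu_desc_index_t cpu_desc_index;
    } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    /* Same shape as the mp_desc.h accessors: one macro per table. */
    #define current_gdt() (current_cpu_datap()->cpu_desc_index.cdi_gdtb_ptr)
    #define current_idt() (current_cpu_datap()->cpu_desc_index.cdi_idtb_ptr)

    int main(void) {
        current_gdt() = (void *)0x1000;   /* the macros expand to assignable lvalues */
        printf("gdt=%p idt=%p\n", current_gdt(), current_idt());
        return 0;
    }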
|
| H A D | i386_timer.c | 80 pp = current_cpu_datap(); in timer_intr() 183 pp = current_cpu_datap(); in timer_set_deadline() 209 pp = current_cpu_datap(); in timer_resync_deadlines() 269 pp = current_cpu_datap(); in timer_queue_expire_local() 300 pp = current_cpu_datap(); in timer_queue_expire_rescan() 340 cpu_data_t *cdp = current_cpu_datap(); in timer_queue_assign() 362 if (queue == &current_cpu_datap()->rtclock_timer.queue) { in timer_queue_cancel() 381 cpu_data_t *cdp = current_cpu_datap(); in timer_queue_migrate_cpu()
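The timer hits show each CPU operating on its own rtclock_timer queue, and timer_queue_cancel() (line 362) comparing a queue pointer against the address of the local CPU's queue. A sketch of that ownership check, with the queue types mocked down to the minimum:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative per-CPU timer state; the real rtclock_timer carries a
     * full timer queue and deadline, reduced here to a bare queue head. */
    typedef struct mpqueue_head { struct mpqueue_head *next; } mpqueue_head_t;
    typedef struct rtclock_timer { mpqueue_head_t queue; } rtclock_timer_t;
    typedef struct cpu_data { rtclock_timer_t rtclock_timer; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    /* Mirrors the timer_queue_cancel() check: is this queue the one owned
     * by the CPU we are currently running on? */
    static bool queue_is_local(mpqueue_head_t *queue) {
        return queue == &current_cpu_datap()->rtclock_timer.queue;
    }

    int main(void) {
        printf("local? %d\n", queue_is_local(&fake_cpu.rtclock_timer.queue));
        return 0;
    }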
|
| H A D | acpi.c | 137 if (current_cpu_datap()->cpu_hibernate) { in acpi_hibernate() 188 cpu_data_t *cdp = current_cpu_datap(); in acpi_sleep_kernel() 285 if (current_cpu_datap()->cpu_hibernate) { in acpi_sleep_kernel() 294 cpu_syscall_init(current_cpu_datap()); in acpi_sleep_kernel() 367 pmCPUMarkRunning(current_cpu_datap()); in acpi_sleep_kernel() 406 if (current_cpu_datap()->cpu_hibernate) { in ml_hibernate_active_post() 410 current_cpu_datap()->cpu_hibernate = 0; in ml_hibernate_active_post() 456 if (current_cpu_datap()->cpu_hibernate) { in acpi_idle_kernel()
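In acpi.c the sleep/wake path repeatedly tests the per-CPU cpu_hibernate flag, and ml_hibernate_active_post() clears it once the resume-from-image work is done. A sketch of that one-shot flag pattern, assuming nothing about the real hibernation logic:

    #include <stdio.h>

    /* Illustrative per-CPU flag modeled on the cpu_hibernate usages above. */
    typedef struct cpu_data { int cpu_hibernate; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static void hibernate_post(void) {
        if (current_cpu_datap()->cpu_hibernate) {
            puts("resuming from hibernation image");
            current_cpu_datap()->cpu_hibernate = 0;   /* one-shot flag */
        }
    }

    int main(void) {
        current_cpu_datap()->cpu_hibernate = 1;
        hibernate_post();   /* prints once */
        hibernate_post();   /* flag already cleared: prints nothing */
        return 0;
    }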
|
| H A D | mp.c | 586 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in NMIInterruptHandler() 631 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in NMIInterruptHandler() 633 current_cpu_datap()->cpu_NMI_acknowledged = TRUE; in NMIInterruptHandler() 634 i_bit_clear(MP_KDP, &current_cpu_datap()->cpu_signals); in NMIInterruptHandler() 883 current_cpu_datap()->cpu_rendezvous_in_progress = TRUE; in mp_rendezvous_action() 928 current_cpu_datap()->cpu_rendezvous_in_progress = FALSE; in mp_rendezvous_action() 1023 current_cpu_datap()->cpu_iflag = intr; in setup_disable_intrs() 1548 cpu_data_t *cdp = current_cpu_datap(); in i386_activate_cpu() 1567 cpu_data_t *cdp = current_cpu_datap(); in i386_deactivate_cpu() 1686 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in mp_kdp_enter() [all …]
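mp_rendezvous_action() (lines 883/928) brackets its callout with the per-CPU cpu_rendezvous_in_progress flag so other code can tell the CPU is inside a rendezvous. A sketch of that bracket, with the callout reduced to a plain function pointer:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative model of the rendezvous bracket from mp.c. */
    typedef struct cpu_data { bool cpu_rendezvous_in_progress; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static void rendezvous_action(void (*action)(void *), void *arg) {
        current_cpu_datap()->cpu_rendezvous_in_progress = true;
        action(arg);                 /* the caller-supplied per-CPU work */
        current_cpu_datap()->cpu_rendezvous_in_progress = false;
    }

    static void say_hello(void *arg) { printf("hello from %s\n", (const char *)arg); }

    int main(void) {
        rendezvous_action(say_hello, "cpu 0");
        return 0;
    }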
|
| H A D | pmCPU.c | 105 cpu_data_t *my_cpu = current_cpu_datap(); in machine_idle() 239 cpu_data_t *cpup = current_cpu_datap(); in pmCPUHalt() 322 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyLogicalCPU() 336 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyCore() 350 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyDie() 364 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyPackage() 495 cpu_data_t *cpup = current_cpu_datap(); in pmCPUMarkRunning() 784 current_cpu_datap()->cpu_nthread = nthread; in thread_tell_urgency() 910 if ((uint32_t)cpu == current_cpu_datap()->lcpu.cpu_num) { in pmReSyncDeadlines() 1024 cpu_data_t *my_cpu = current_cpu_datap(); in machine_track_platform_idle()
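The pmGetMy*() helpers all start from current_cpu_datap() and hand back a node of the CPU topology. A sketch of that shape; the package link mirrors the lcpu.package references visible elsewhere in this listing, while the node contents are made up for the example:

    #include <stdio.h>

    /* Illustrative topology nodes, far smaller than the XNU originals. */
    typedef struct x86_pkg  { int package_idle_exits; } x86_pkg_t;
    typedef struct x86_lcpu { int cpu_num; x86_pkg_t *package; } x86_lcpu_t;
    typedef struct cpu_data { x86_lcpu_t lcpu; } cpu_data_t;

    static x86_pkg_t  pkg      = { 0 };
    static cpu_data_t fake_cpu = { { 0, &pkg } };

    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    /* Same shape as pmGetMyLogicalCPU() / pmGetMyPackage(). */
    static x86_lcpu_t *get_my_lcpu(void)    { return &current_cpu_datap()->lcpu; }
    static x86_pkg_t  *get_my_package(void) { return get_my_lcpu()->package; }

    int main(void) {
        printf("cpu %d: package idle exits %d\n",
               get_my_lcpu()->cpu_num, get_my_package()->package_idle_exits);
        return 0;
    }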
|
| H A D | thread.h | 212 stack_depth = current_cpu_datap()->cpu_kernel_stack in current_kernel_stack_depth() 219 "depth limit: 0x%016lx", current_cpu_datap()->cpu_kernel_stack, in current_kernel_stack_depth()
|
| H A D | machine_routines.c | 441 x86_saved_state_t *state = current_cpu_datap()->cpu_int_state; in ml_did_interrupt_userspace() 465 *pidlep = (current_cpu_datap()->lcpu.package->num_idle == topoParms.nLThreadsPerPackage); in ml_get_power_state() 1184 current_cpu_datap()->cpu_ldt == KERNEL_LDT) { in ml_cpu_set_ldt() 1189 current_cpu_datap()->cpu_ldt = selector; in ml_cpu_set_ldt() 1201 return current_cpu_datap()->cpu_int_event_time; in ml_cpu_int_event_time() 1210 return local - (current_cpu_datap()->cpu_int_stack_top - INTSTACK_SIZE); in ml_stack_remaining() 1224 return current_cpu_datap()->cpu_int_stack_top - INTSTACK_SIZE; in ml_stack_base() 1245 return &current_cpu_datap()->cpu_kcov_data; in current_kcov_data()
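ml_stack_remaining() (line 1210) computes free interrupt-stack space as the address of a local variable minus the stack base, where the base is cpu_int_stack_top minus INTSTACK_SIZE. A sketch of that arithmetic with made-up addresses and an illustrative stack size:

    #include <stdint.h>
    #include <stdio.h>

    #define INTSTACK_SIZE 4096   /* illustrative size; the real constant differs */

    /* Models the ml_stack_remaining() calculation: with a downward-growing
     * stack, the bytes still free equal the address of a local variable
     * minus the stack's base (top minus size). */
    typedef struct cpu_data { uintptr_t cpu_int_stack_top; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static uintptr_t stack_remaining(uintptr_t local) {
        return local - (current_cpu_datap()->cpu_int_stack_top - INTSTACK_SIZE);
    }

    int main(void) {
        /* Pretend the interrupt stack spans [0x10000, 0x11000) and we are
         * currently 256 bytes into it. */
        fake_cpu.cpu_int_stack_top = 0x11000;
        printf("remaining: %lu bytes\n",
               (unsigned long)stack_remaining(0x11000 - 256));
        return 0;
    }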
|
| H A D | bsd_i386.c | 872 NULL != current_cpu_datap()->cpu_int_state && in find_kern_regs() 873 !(USER_STATE(thread) == current_cpu_datap()->cpu_int_state && in find_kern_regs() 874 current_cpu_datap()->cpu_interrupt_level == 1)) { in find_kern_regs() 875 return current_cpu_datap()->cpu_int_state; in find_kern_regs() 886 return current_cpu_datap()->cpu_int_stack_top; in dtrace_get_cpu_int_stack_top()
|
| H A D | Diagnostics.c | 260 pkes.pkg_idle_exits = current_cpu_datap()->lcpu.package->package_idle_exits; in diagCall64() 355 cpu_data_t *cdp = current_cpu_datap(); in cpu_powerstats() 406 cpu_data_t *cdp = current_cpu_datap(); in cpu_pmc_control()
|
| H A D | lapic_native.c | 221 current_cpu_datap()->cpu_soft_apic_lvt_timer = lo; in x2apic_init() 233 return current_cpu_datap()->cpu_soft_apic_lvt_timer; in x2apic_read() 243 current_cpu_datap()->cpu_soft_apic_lvt_timer = value; in x2apic_write() 352 current_cpu_datap()->cpu_soft_apic_lvt_timer = lo; in lapic_reinit() 429 current_cpu_datap()->cpu_phys_number = cpu_to_lapic[0]; in lapic_init()
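lapic_native.c keeps a per-CPU software shadow of the LVT timer value: x2apic_write() stores into cpu_soft_apic_lvt_timer, x2apic_read() returns it, and x2apic_init()/lapic_reinit() seed the shadow. A sketch of that shadow-register pattern (the hardware access is left as a comment and the LVT encoding is made up):

    #include <stdint.h>
    #include <stdio.h>

    /* Per-CPU software copy of the LVT timer value, as in the listing. */
    typedef struct cpu_data { uint32_t cpu_soft_apic_lvt_timer; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static void shadow_write(uint32_t value) {
        /* ...program the hardware register here... */
        current_cpu_datap()->cpu_soft_apic_lvt_timer = value;   /* keep the copy */
    }

    static uint32_t shadow_read(void) {
        return current_cpu_datap()->cpu_soft_apic_lvt_timer;
    }

    int main(void) {
        shadow_write(0x400e2u);   /* illustrative LVT timer encoding */
        printf("lvt timer shadow = 0x%x\n", shadow_read());
        return 0;
    }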
|
| H A D | hibernate_i386.c | 267 if (current_cpu_datap()->cpu_hibernate) { in hibernate_vm_lock() 277 if (current_cpu_datap()->cpu_hibernate) { in hibernate_vm_unlock()
|
| H A D | machine_check.c | 248 mca_save_state(current_cpu_datap()->cpu_mca_state); in mca_check_save() 311 mca_state_t *mca_state = current_cpu_datap()->cpu_mca_state; in mca_dump()
|
| H A D | cpu_data.h | 425 current_cpu_datap(void) in current_cpu_datap() function 526 cpu_data_t *cdata = current_cpu_datap(); in pltrace_internal() 564 cdata = current_cpu_datap(); in traptrace_start()
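cpu_data.h is where current_cpu_datap() itself is defined (the listing marks line 425 as the function). The model below substitutes a thread-local pointer for the kernel's CPU-private addressing, which is an intentional simplification rather than a reproduction of the real mechanism:

    #include <stdio.h>

    typedef struct cpu_data { int cpu_number; } cpu_data_t;

    /* Stand-in for the per-CPU slot; the kernel reaches its per-CPU block
     * through CPU-private state, not a C thread-local. */
    static _Thread_local cpu_data_t *cpu_self;

    static cpu_data_t *current_cpu_datap(void) { return cpu_self; }

    int main(void) {
        cpu_data_t cpu0 = { .cpu_number = 0 };
        cpu_self = &cpu0;                      /* what per-CPU init would do */
        printf("running on cpu %d\n", current_cpu_datap()->cpu_number);
        return 0;
    }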
|
| H A D | mp_native.c | 104 volatile int *my_word = &current_cpu_datap()->cpu_signals; in handle_pending_TLB_flushes()
|
| H A D | cpu_topology.c | 309 cachep = current_cpu_datap()->lcpu.caches[level - 1]; in ml_cpu_cache_size() 324 cachep = current_cpu_datap()->lcpu.caches[level - 1]; in ml_cpu_cache_sharing()
|
| H A D | cpu_threads.h | 55 #define x86_lcpu() (&current_cpu_datap()->lcpu)
|
| H A D | locks_i386_opt.c | 92 if (current_cpu_datap()->cpu_hibernate) { in lck_mtx_check_preemption()
|
| /xnu-11215/osfmk/kern/ |
| H A D | kpc.h | 65 #define FIXED_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr)]) 67 #define CONFIGURABLE_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr) + kpc_fi… 71 #define FIXED_SHADOW(ctr) (current_cpu_datap()->cpu_kpc_shadow[(ctr)]) 73 #define CONFIGURABLE_SHADOW(ctr) (current_cpu_datap()->cpu_kpc_shadow[(ctr) + kpc_fi…
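The kpc.h macros index per-CPU reload/shadow arrays by counter number, with the configurable variants offset past the fixed entries (the offset identifier is truncated in the listing, so N_FIXED below is only a stand-in). A sketch of the macro shape:

    #include <stdint.h>
    #include <stdio.h>

    #define N_FIXED 4      /* stand-in; the real offset term is truncated above */
    #define N_TOTAL 8

    /* One reload slot per counter, fixed counters first. */
    typedef struct cpu_data { uint64_t cpu_kpc_reload[N_TOTAL]; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    #define FIXED_RELOAD(ctr)        (current_cpu_datap()->cpu_kpc_reload[(ctr)])
    #define CONFIGURABLE_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr) + N_FIXED])

    int main(void) {
        FIXED_RELOAD(0) = 100;          /* fixed counter 0 */
        CONFIGURABLE_RELOAD(0) = 200;   /* lands at slot N_FIXED */
        printf("slot0=%llu slot%d=%llu\n",
               (unsigned long long)fake_cpu.cpu_kpc_reload[0], N_FIXED,
               (unsigned long long)fake_cpu.cpu_kpc_reload[N_FIXED]);
        return 0;
    }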
|
| H A D | kpc_thread.c | 127 cpu = current_cpu_datap(); in kpc_update_thread_counters()
|
| H A D | hv_support_kext.c | 263 return current_cpu_datap()->cpu_pending_ast != 0; in hv_ast_pending()
|
| /xnu-11215/osfmk/i386/vmx/ |
| H A D | vmx_cpu.c | 128 vmx_specs_t *specs = &current_cpu_datap()->cpu_vmx.specs; in vmx_cpu_init() 169 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_on() 209 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_off() 391 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_resume()
|
| /xnu-11215/osfmk/arm/ |
| H A D | model_dep.c | 386 cpu_data_t *current_cpu_datap = cpu_datap(cpu); in panic_display_last_pc_lr() local 388 if (current_cpu_datap == NULL) { in panic_display_last_pc_lr() 392 if (current_cpu_datap == getCpuDatap()) { in panic_display_last_pc_lr() 402 current_cpu_datap->ipi_pc, (uint64_t)VM_KERNEL_STRIP_PTR(current_cpu_datap->ipi_lr), in panic_display_last_pc_lr() 403 (uint64_t)VM_KERNEL_STRIP_PTR(current_cpu_datap->ipi_fp)); in panic_display_last_pc_lr() 1322 current_cpu_datap()->ipi_pc = (uint64_t)get_saved_state_pc(regs); in DebuggerXCall() 1323 current_cpu_datap()->ipi_lr = (uint64_t)get_saved_state_lr(regs); in DebuggerXCall() 1324 current_cpu_datap()->ipi_fp = (uint64_t)get_saved_state_fp(regs); in DebuggerXCall()
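On ARM, DebuggerXCall() (lines 1322-1324) stashes the interrupted pc/lr/fp into per-CPU fields and panic_display_last_pc_lr() prints them later; note that the latter also declares a local variable that happens to be named current_cpu_datap. A sketch of the record-then-display pairing, with made-up addresses:

    #include <stdint.h>
    #include <stdio.h>

    /* Per-CPU snapshot of the interrupted context, as in the listing. */
    typedef struct cpu_data { uint64_t ipi_pc, ipi_lr, ipi_fp; } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static void record_debugger_xcall(uint64_t pc, uint64_t lr, uint64_t fp) {
        cpu_data_t *cdp = current_cpu_datap();
        cdp->ipi_pc = pc;
        cdp->ipi_lr = lr;
        cdp->ipi_fp = fp;
    }

    static void display_last_pc_lr(const cpu_data_t *cdp) {
        printf("pc 0x%llx lr 0x%llx fp 0x%llx\n",
               (unsigned long long)cdp->ipi_pc,
               (unsigned long long)cdp->ipi_lr,
               (unsigned long long)cdp->ipi_fp);
    }

    int main(void) {
        record_debugger_xcall(0xfffffff007004000ULL, 0xfffffff007004100ULL,
                              0xfffffff00aa00000ULL);
        display_last_pc_lr(current_cpu_datap());
        return 0;
    }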
|
| H A D | cpu_data.h | 124 #define current_cpu_datap() getCpuDatap() macro
|
| /xnu-11215/osfmk/kdp/ml/x86_64/ |
| H A D | kdp_machdep.c | 438 if (current_cpu_datap()->cpu_fatal_trap_state) { in kdp_i386_trap() 439 current_cpu_datap()->cpu_post_fatal_trap_state = saved_state; in kdp_i386_trap() 440 saved_state = current_cpu_datap()->cpu_fatal_trap_state; in kdp_i386_trap()
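kdp_i386_trap() (lines 438-440) swaps in a previously recorded fatal trap state when one exists: the state shown to the debugger becomes cpu_fatal_trap_state, and the state that actually arrived is parked in cpu_post_fatal_trap_state. A sketch of that swap with a minimal saved-state type:

    #include <stdio.h>

    /* Minimal stand-in for the saved register state. */
    typedef struct saved_state { int trapno; } x86_saved_state_t;

    typedef struct cpu_data {
        x86_saved_state_t *cpu_fatal_trap_state;
        x86_saved_state_t *cpu_post_fatal_trap_state;
    } cpu_data_t;

    static cpu_data_t fake_cpu;
    static cpu_data_t *current_cpu_datap(void) { return &fake_cpu; }

    static x86_saved_state_t *pick_state_for_debugger(x86_saved_state_t *saved_state) {
        if (current_cpu_datap()->cpu_fatal_trap_state) {
            current_cpu_datap()->cpu_post_fatal_trap_state = saved_state;
            saved_state = current_cpu_datap()->cpu_fatal_trap_state;
        }
        return saved_state;
    }

    int main(void) {
        x86_saved_state_t fatal = { 13 }, incoming = { 3 };
        fake_cpu.cpu_fatal_trap_state = &fatal;
        printf("debugger sees trap %d\n", pick_state_for_debugger(&incoming)->trapno);
        return 0;
    }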
|