/linux-6.15/arch/loongarch/kvm/
main.c
    19: static struct kvm_context __percpu *vmcs;   [variable]
   275: kvm->arch.vmcs = vmcs;   in kvm_init_vmcs()
   325: this_cpu_ptr(vmcs)->last_vcpu = NULL;   in kvm_arch_enable_virtualization_cpu()
   348: if (!vmcs) {   in kvm_loongarch_env_init()
   355: free_percpu(vmcs);   in kvm_loongarch_env_init()
   356: vmcs = NULL;   in kvm_loongarch_env_init()
   371: free_percpu(vmcs);   in kvm_loongarch_env_init()
   372: vmcs = NULL;   in kvm_loongarch_env_init()
   391: context = per_cpu_ptr(vmcs, cpu);   in kvm_loongarch_env_init()
   419: if (vmcs)   in kvm_loongarch_env_exit()
   [all …]
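
The hits above describe a module-level per-CPU `kvm_context`: kvm_loongarch_env_init() allocates it and walks every CPU with per_cpu_ptr(), kvm_init_vmcs() caches the pointer in kvm->arch.vmcs, and the error/exit paths release it with free_percpu(). A minimal sketch of that allocate/initialize/free pattern, assuming only the last_vcpu member shown in the listing (the real init code sets up more state than this):

```c
#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/errno.h>

struct kvm_vcpu;

/* Only last_vcpu comes from the listing; the real struct has more fields. */
struct kvm_context_sketch {
	struct kvm_vcpu *last_vcpu;	/* vCPU that last ran on this CPU */
};

static struct kvm_context_sketch __percpu *vmcs_sketch;

static int env_init_sketch(void)
{
	struct kvm_context_sketch *context;
	int cpu;

	vmcs_sketch = alloc_percpu(struct kvm_context_sketch);
	if (!vmcs_sketch)
		return -ENOMEM;

	/* Reset the slot belonging to every possible CPU. */
	for_each_possible_cpu(cpu) {
		context = per_cpu_ptr(vmcs_sketch, cpu);
		context->last_vcpu = NULL;
	}
	return 0;
}

static void env_exit_sketch(void)
{
	if (vmcs_sketch) {
		free_percpu(vmcs_sketch);
		vmcs_sketch = NULL;
	}
}
```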
|
vcpu.c
    39: context = this_cpu_ptr(vcpu->kvm->arch.vmcs);   in kvm_save_host_pmu()
    54: context = this_cpu_ptr(vcpu->kvm->arch.vmcs);   in kvm_restore_host_pmu()
  1567: context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);   in kvm_arch_vcpu_destroy()
  1589: context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);   in _kvm_vcpu_load()
|
/linux-6.15/arch/x86/kvm/vmx/
vmx_ops.h
    15: void vmclear_error(struct vmcs *vmcs, u64 phys_addr);
    16: void vmptrld_error(struct vmcs *vmcs, u64 phys_addr);
   289: static inline void vmcs_clear(struct vmcs *vmcs)   in vmcs_clear()  [argument]
   291: u64 phys_addr = __pa(vmcs);   in vmcs_clear()
   293: vmx_asm1(vmclear, "m"(phys_addr), vmcs, phys_addr);   in vmcs_clear()
   296: static inline void vmcs_load(struct vmcs *vmcs)   in vmcs_load()  [argument]
   298: u64 phys_addr = __pa(vmcs);   in vmcs_load()
   303: vmx_asm1(vmptrld, "m"(phys_addr), vmcs, phys_addr);   in vmcs_load()
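
These hits show the pattern used by vmcs_clear() and vmcs_load(): translate the VMCS pointer to a physical address with __pa(), then hand it to VMCLEAR/VMPTRLD through the vmx_asm1() macro, which also routes failures to vmclear_error()/vmptrld_error(). A simplified sketch of what the two helpers boil down to once that macro is unwrapped; the exception fixup and error reporting of the real helpers are omitted here:

```c
#include <linux/types.h>
#include <asm/page.h>		/* __pa() */

struct vmcs;			/* opaque region, see vmcs.h below */

/* VMCLEAR/VMPTRLD read a 64-bit *physical* address from memory. */
static inline void vmcs_clear_sketch(struct vmcs *vmcs)
{
	u64 phys_addr = __pa(vmcs);

	asm volatile("vmclear %0" : : "m"(phys_addr) : "cc", "memory");
}

static inline void vmcs_load_sketch(struct vmcs *vmcs)
{
	u64 phys_addr = __pa(vmcs);

	asm volatile("vmptrld %0" : : "m"(phys_addr) : "cc", "memory");
}
```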
|
vmcs.h
    21: struct vmcs {   [struct]
    27: DECLARE_PER_CPU(struct vmcs *, current_vmcs);   [argument]
    62: struct vmcs *vmcs;   [member]
    63: struct vmcs *shadow_vmcs;
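
vmcs.h declares the opaque VMCS region, a per-CPU current_vmcs pointer, and a tracking structure (loaded_vmcs) that carries both the ordinary VMCS and an optional shadow VMCS. A rough sketch of the shape of these objects, assuming only the architecturally defined start of the region (revision identifier, then the VMX-abort indicator); field names beyond those in the listing are illustrative:

```c
#include <linux/types.h>

/* The hardware-defined VMCS region; everything past abort is opaque. */
struct vmcs_region_sketch {
	u32 revision_id;	/* must match IA32_VMX_BASIC; bit 31 flags a shadow VMCS */
	u32 abort;		/* VMX-abort indicator */
	char data[];		/* processor-specific format */
};

/*
 * Per-VMCS bookkeeping in the spirit of loaded_vmcs: the ordinary VMCS
 * plus an optional shadow VMCS used for nested virtualization.  The
 * per-CPU current_vmcs pointer declared above records which region is
 * currently active on each CPU.
 */
struct loaded_vmcs_sketch {
	struct vmcs_region_sketch *vmcs;
	struct vmcs_region_sketch *shadow_vmcs;
	int cpu;		/* assumed: CPU it is loaded on, or -1 */
};
```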
|
vmx.h
   596: static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs) \
   598: return vmcs->controls_shadow.lname; \
   704: struct vmcs *alloc_vmcs_cpu(bool shadow, int cpu, gfp_t flags);
   705: void free_vmcs(struct vmcs *vmcs);
   710: static inline struct vmcs *alloc_vmcs(bool shadow)   in alloc_vmcs()
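
The hits at 596/598 come from a token-pasting macro that generates accessors for cached ("shadowed") copies of VMCS control fields kept in loaded_vmcs, so reading a control field back does not require a VMREAD. A hedged sketch of such a generator; the real kernel macro also performs the VMWRITE when the cached value changes, and every name here other than controls_shadow.lname is an assumption:

```c
#include <linux/types.h>

#define BUILD_CONTROLS_SHADOW_SKETCH(lname, bits)				\
static inline u##bits __##lname##_controls_get_sketch(struct loaded_vmcs *vmcs) \
{										\
	return vmcs->controls_shadow.lname;	/* cached copy, no VMREAD */	\
}										\
static inline void __##lname##_controls_set_sketch(struct loaded_vmcs *vmcs,	\
						   u##bits val)			\
{										\
	vmcs->controls_shadow.lname = val;	/* plus a VMWRITE in the real code */ \
}

/* e.g. BUILD_CONTROLS_SHADOW_SKETCH(pin, 32) would generate 32-bit accessors. */
```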
|
vmx.c
   468: noinline void vmclear_error(struct vmcs *vmcs, u64 phys_addr)   in vmclear_error()  [argument]
   474: noinline void vmptrld_error(struct vmcs *vmcs, u64 phys_addr)   in vmptrld_error()  [argument]
   773: vmcs_clear(v->vmcs);   in vmx_emergency_disable_virtualization_cpu()
  1449: struct vmcs *prev;   in vmx_vcpu_load_vmcs()
  2912: struct vmcs *vmcs;   in alloc_vmcs_cpu()  [local]
  2928: return vmcs;   in alloc_vmcs_cpu()
  2931: void free_vmcs(struct vmcs *vmcs)   in free_vmcs()  [argument]
  2941: if (!loaded_vmcs->vmcs)   in free_loaded_vmcs()
  2954: if (!loaded_vmcs->vmcs)   in alloc_loaded_vmcs()
  2998: struct vmcs *vmcs;   in alloc_kvm_area()  [local]
  [all …]
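
alloc_vmcs_cpu() allocates the backing page for one VMCS on behalf of a specific CPU (alloc_kvm_area() calls it for every CPU) and free_vmcs() releases it. A hedged sketch of that allocation, assuming a node-local order-0 page, with stand-in names for the real vmcs_config size/revision values; the sketch writes the revision ID into the first dword directly, whereas the kernel goes through its VMCS header struct:

```c
#include <linux/gfp.h>
#include <linux/mm.h>		/* page_address(), free_page() */
#include <linux/string.h>
#include <linux/topology.h>	/* cpu_to_node() */

struct vmcs;

/* Stand-ins for the real vmcs_config fields (assumptions). */
static unsigned int vmcs_size_sketch = 4096;
static u32 vmcs_revision_id_sketch;

static struct vmcs *alloc_vmcs_cpu_sketch(int cpu, gfp_t flags)
{
	int node = cpu_to_node(cpu);
	struct page *page;
	struct vmcs *vmcs;

	/* One node-local, order-0 page backs the VMCS region. */
	page = __alloc_pages_node(node, flags, 0);
	if (!page)
		return NULL;

	vmcs = page_address(page);
	memset(vmcs, 0, vmcs_size_sketch);

	/* The region must carry the revision ID before it can be VMPTRLDed. */
	*(u32 *)vmcs = vmcs_revision_id_sketch;
	return vmcs;
}

static void free_vmcs_sketch(struct vmcs *vmcs)
{
	free_page((unsigned long)vmcs);
}
```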
|
nested.c
   292: static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)   in vmx_switch_vmcs()  [argument]
   298: if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))   in vmx_switch_vmcs()
   303: vmx->loaded_vmcs = vmcs;   in vmx_switch_vmcs()
  1595: struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;   in copy_shadow_to_vmcs12()
  1615: vmcs_load(vmx->loaded_vmcs->vmcs);   in copy_shadow_to_vmcs12()
  1630: struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;   in copy_vmcs12_to_shadow()
  1651: vmcs_load(vmx->loaded_vmcs->vmcs);   in copy_vmcs12_to_shadow()
  5288: static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)   in alloc_shadow_vmcs()
  5752: vmcs_load(vmx->loaded_vmcs->vmcs);   in handle_vmwrite()
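
The nested code switches between VMCSes by swapping vmx->loaded_vmcs, with the WARN_ON_ONCE guarding against a redundant switch; the shadow-VMCS copy routines likewise finish by vmcs_load()ing the ordinary VMCS again. A hedged reconstruction of the switch control flow only; the detach/attach helpers are illustrative names, not functions taken from the listing:

```c
/* Illustrative helpers, not functions from the listing. */
static void detach_current_vmcs(struct kvm_vcpu *vcpu);
static void attach_vmcs(struct kvm_vcpu *vcpu, int cpu);

static void switch_vmcs_sketch(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	int cpu;

	/* Switching to the VMCS that is already loaded would be a bug. */
	if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))
		return;

	cpu = get_cpu();			/* stay on one CPU while swapping */
	detach_current_vmcs(vcpu);		/* assumed: drop the current VMCS */
	vmx->loaded_vmcs = vmcs;
	attach_vmcs(vcpu, cpu);			/* assumed: VMPTRLD the new VMCS */
	put_cpu();
}
```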
|
/linux-6.15/tools/perf/util/intel-pt-decoder/
intel-pt-decoder.c
   167: uint64_t vmcs;   [member]
  2358: data->vmcs = true;   in intel_pt_vm_psb_lookahead_cb()
  2526: .vmcs = NO_VMCS,   in intel_pt_translate_vm_tsc_offset()
  2601: uint64_t vmcs;   in intel_pt_vm_tm_corr_tsc()  [local]
  2629: vmcs = data->vmcs ? data->vmcs_packet.payload : decoder->vmcs;   in intel_pt_vm_tm_corr_tsc()
  2630: if (vmcs == NO_VMCS)   in intel_pt_vm_tm_corr_tsc()
  2631: vmcs = 0;   in intel_pt_vm_tm_corr_tsc()
  2754: uint64_t vmcs;   in intel_pt_vm_tm_corr_pebs_tsc()  [local]
  2756: vmcs = decoder->vmcs;   in intel_pt_vm_tm_corr_pebs_tsc()
  2757: if (vmcs == NO_VMCS)   in intel_pt_vm_tm_corr_pebs_tsc()
  [all …]
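
These hits show how the decoder picks which VMCS to use for VM timestamp correction: prefer the payload of a VMCS packet seen during PSB lookahead, otherwise fall back to the decoder's last-known VMCS, and map the NO_VMCS sentinel to 0 before asking for a per-VMCS TSC offset. A hedged, simplified sketch of that selection; the sentinel value and the struct/field names here are assumptions, not the decoder's own:

```c
#include <stdbool.h>
#include <stdint.h>

/* Assumed sentinel; the real NO_VMCS value lives in intel-pt-decoder.h. */
#define NO_VMCS_SKETCH UINT64_MAX

struct vm_tsc_data_sketch {
	bool     have_vmcs_packet;	/* did PSB lookahead see a VMCS packet? */
	uint64_t vmcs_payload;		/* its payload, if so */
};

static uint64_t pick_vmcs_sketch(const struct vm_tsc_data_sketch *data,
				 uint64_t decoder_vmcs)
{
	uint64_t vmcs = data->have_vmcs_packet ? data->vmcs_payload : decoder_vmcs;

	if (vmcs == NO_VMCS_SKETCH)
		vmcs = 0;	/* 0: no known VMCS, fall back to the default offset */
	return vmcs;
}
```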
|
intel-pt-decoder.h
   210: uint64_t vmcs;   [member]
   280: struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
|
/linux-6.15/tools/testing/selftests/kvm/lib/x86/
vmx.c
    84: vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);   in vcpu_alloc_vmx()
    85: vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);   in vcpu_alloc_vmx()
    86: vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);   in vcpu_alloc_vmx()
   161: *(uint32_t *)(vmx->vmcs) = vmcs_revision();   in load_vmcs()
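
In the selftest library, vcpu_alloc_vmx() runs on the host side and records the VMCS page's guest-virtual, host-virtual, and guest-physical addresses, while load_vmcs() runs from guest code and stamps the revision ID before the region can be loaded. A hedged sketch that collapses the two steps into one sequence; vmptrld() is an assumed wrapper for the VMPTRLD instruction, every other call appears in the listing:

```c
#include <stdbool.h>
#include <stdint.h>

static bool setup_and_load_vmcs_sketch(struct kvm_vm *vm, struct vmx_pages *vmx)
{
	/* Host side: one guest page holds the VMCS; keep all three views. */
	vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);
	vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);
	vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);

	/* Guest side: the first dword of the region must be the revision ID. */
	*(uint32_t *)(vmx->vmcs) = vmcs_revision();

	/* Guest side: make the region current by guest-physical address. */
	return vmptrld(vmx->vmcs_gpa);		/* assumed helper */
}
```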
|
/linux-6.15/tools/perf/util/
intel-pt.c
   317: u64 vmcs,   in intel_pt_findnew_vmcs()  [argument]
   328: if (v->vmcs == vmcs)   in intel_pt_findnew_vmcs()
   331: if (vmcs < v->vmcs)   in intel_pt_findnew_vmcs()
   339: v->vmcs = vmcs;   in intel_pt_findnew_vmcs()
   350: static struct intel_pt_vmcs_info *intel_pt_findnew_vmcs_info(void *data, uint64_t vmcs)   in intel_pt_findnew_vmcs_info()  [argument]
   355: if (!vmcs && !pt->dflt_tsc_offset)   in intel_pt_findnew_vmcs_info()
   358: return intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, pt->dflt_tsc_offset);   in intel_pt_findnew_vmcs_info()
  4105: u64 tsc_offset, vmcs;   in intel_pt_parse_vm_tm_corr_arg()  [local]
  4125: vmcs = strtoull(p, &p, 0);   in intel_pt_parse_vm_tm_corr_arg()
  4128: if (!vmcs)   in intel_pt_parse_vm_tm_corr_arg()
  [all …]
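
intel_pt_findnew_vmcs() is a find-or-create lookup keyed by the VMCS pointer value, and intel_pt_findnew_vmcs_info() short-circuits when there is neither a VMCS nor a default TSC offset; the comparison hits (v->vmcs == vmcs, vmcs < v->vmcs) indicate the usual rbtree walk. A hedged sketch of that walk using the ordinary kernel/tools rbtree idiom; apart from the vmcs/tsc_offset fields implied by the listing, the struct layout is an assumption:

```c
#include <linux/rbtree.h>
#include <linux/types.h>
#include <linux/zalloc.h>

struct vmcs_info_sketch {
	struct rb_node rb_node;
	u64 vmcs;
	u64 tsc_offset;
};

static struct vmcs_info_sketch *findnew_vmcs_sketch(struct rb_root *root,
						    u64 vmcs, u64 dflt_tsc_offset)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent = NULL;
	struct vmcs_info_sketch *v;

	/* Descend, comparing the vmcs key; return an existing node on a match. */
	while (*p) {
		parent = *p;
		v = rb_entry(parent, struct vmcs_info_sketch, rb_node);
		if (v->vmcs == vmcs)
			return v;
		if (vmcs < v->vmcs)
			p = &(*p)->rb_left;
		else
			p = &(*p)->rb_right;
	}

	/* Not found: create a node seeded with the default TSC offset. */
	v = zalloc(sizeof(*v));
	if (!v)
		return NULL;

	v->vmcs = vmcs;
	v->tsc_offset = dflt_tsc_offset;

	rb_link_node(&v->rb_node, parent, p);
	rb_insert_color(&v->rb_node, root);
	return v;
}
```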
|
/linux-6.15/arch/loongarch/include/asm/
kvm_host.h
   131: struct kvm_context __percpu *vmcs;   [member]
|
/linux-6.15/tools/testing/selftests/kvm/include/x86/
vmx.h
   505: void *vmcs;   [member]
|
evmcs.h
   248: static inline int evmcs_vmptrld(uint64_t vmcs_pa, void *vmcs)   in evmcs_vmptrld()  [argument]
   253: current_evmcs = vmcs;   in evmcs_vmptrld()
|