Searched refs:vmxon (Results 1 – 5 of 5) sorted by relevance
79 vmx->vmxon = (void *)vm_vaddr_alloc_page(vm); in vcpu_alloc_vmx()
80 vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
81 vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
151 *(uint32_t *)(vmx->vmxon) = vmcs_revision(); in prepare_for_vmx_operation()
152 if (vmxon(vmx->vmxon_gpa)) in prepare_for_vmx_operation()
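These first hits come from the KVM selftest library: a page is allocated for the VMXON region, its first 32 bits are stamped with the VMCS revision identifier, and VMXON is executed on the region's guest-physical address. Below is a minimal sketch of that sequence; the rdmsr() and vmxon() declarations and the enter_vmx_root() wrapper are stand-ins for illustration rather than the selftest API, and the real prepare_for_vmx_operation() also sets up CR0/CR4 and IA32_FEATURE_CONTROL before reaching this point.

    #include <stdint.h>

    #define MSR_IA32_VMX_BASIC 0x480

    /* Assumed helpers, stand-ins for the selftest library's own versions. */
    extern uint64_t rdmsr(uint32_t msr);   /* read an MSR from guest code */
    extern int vmxon(uint64_t vmxon_gpa);  /* execute VMXON; 0 on success */

    /*
     * A VMXON region is a 4 KiB, page-aligned block whose first 32 bits
     * must hold the VMCS revision identifier (bits 30:0 of IA32_VMX_BASIC).
     * VMXON then takes the region's physical address as its operand.
     */
    static int enter_vmx_root(void *vmxon_region, uint64_t vmxon_gpa)
    {
            uint32_t revision = rdmsr(MSR_IA32_VMX_BASIC) & 0x7fffffffu;

            *(uint32_t *)vmxon_region = revision;
            return vmxon(vmxon_gpa);       /* non-zero: VMXON failed */
    }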
295 static inline int vmxon(uint64_t phys) in vmxon() function
501 void *vmxon; member
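The second file is the selftest header that both declares the void *vmxon region pointer and defines the vmxon() instruction wrapper itself. One plausible shape for such a wrapper is sketched below; treat the exact asm constraints as an assumption rather than a copy of the upstream definition. VMXON reports failure through CF or ZF, which setna (set if CF=1 or ZF=1) folds into a single return value.

    #include <stdint.h>

    /*
     * Execute VMXON on the physical address of a prepared VMXON region.
     * Returns 0 on success, non-zero if VMXON failed.
     */
    static inline int vmxon(uint64_t phys)
    {
            uint8_t ret;

            __asm__ __volatile__("vmxon %[pa]; setna %[ret]"
                                 : [ret] "=rm"(ret)
                                 : [pa] "m"(phys)
                                 : "cc", "memory");

            return ret;
    }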
117 bool vmxon; member
237 bool vmxon; member
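Judging by the surrounding results, these two `bool vmxon;` members are KVM's per-vCPU nested-VMX bookkeeping: one live flag and one copy held inside an smm sub-struct. A simplified sketch of that layout follows; the field names are taken from the hits, and everything else in the real structure is omitted.

    #include <stdbool.h>

    /* Stripped-down stand-in for KVM's per-vCPU nested-VMX state. */
    struct nested_vmx_sketch {
            /* Has L1 executed VMXON, i.e. is it in VMX operation? */
            bool vmxon;

            /*
             * While the vCPU is in System Management Mode, VMX operation
             * is suspended, so the live flag is parked here and restored
             * on RSM.
             */
            struct {
                    bool vmxon;
            } smm;
    };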
338 if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon) in free_nested()
343 vmx->nested.vmxon = false; in free_nested()
344 vmx->nested.smm.vmxon = false; in free_nested()
1451 if (vmx->nested.vmxon) in vmx_set_vmx_msr()
3471 if (!to_vmx(vcpu)->nested.vmxon) { in nested_vmx_check_permission()
5338 vmx->nested.vmxon = true; in enter_vmx_operation()
5406 if (vmx->nested.vmxon) in handle_vmxon()
6649 (vmx->nested.vmxon || vmx->nested.smm.vmxon)) { in vmx_get_nested_state()
6666 if (vmx->nested.smm.vmxon) in vmx_get_nested_state()
6870 vmx->nested.smm.vmxon = true; in vmx_set_nested_state()
[all …]
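The nested.c hits revolve around one invariant: L1 must execute VMXON before any other VMX instruction is emulated, and a second VMXON while already in VMX operation fails rather than entering again. A hedged sketch of that gate is below, using a stripped-down state struct; the real nested_vmx_check_permission() also verifies CPL 0 and injects #UD through the KVM exception machinery.

    #include <stdbool.h>

    /* Stripped-down stand-in for the real per-vCPU nested state. */
    struct nested_sketch {
            bool vmxon;   /* set on VMXON, cleared on VMXOFF/teardown */
    };

    /*
     * Gate implied by nested_vmx_check_permission(): emulating any VMX
     * instruction other than VMXON requires L1 to be in VMX operation.
     * Returns true if emulation may proceed.
     *
     * handle_vmxon() applies the inverse check: VMXON while the flag is
     * already set fails with a VMX error code instead of succeeding.
     */
    static bool vmx_instruction_allowed(const struct nested_sketch *nested)
    {
            return nested->vmxon;
    }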
2407 vmx->nested.vmxon) in vmx_set_msr()
3315 if (to_vmx(vcpu)->nested.vmxon) in vmx_is_valid_cr0()
3470 if (to_vmx(vcpu)->nested.vmxon && !nested_cr4_valid(vcpu, cr4)) in vmx_is_valid_cr4()
8321 vmx->nested.smm.vmxon = vmx->nested.vmxon; in vmx_enter_smm()
8322 vmx->nested.vmxon = false; in vmx_enter_smm()
8332 if (vmx->nested.smm.vmxon) { in vmx_leave_smm()
8333 vmx->nested.vmxon = true; in vmx_leave_smm()
8334 vmx->nested.smm.vmxon = false; in vmx_leave_smm()
8356 return to_vmx(vcpu)->nested.vmxon && !is_guest_mode(vcpu); in vmx_apic_init_signal_blocked()
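The vmx.c hits show the other half of the SMM bookkeeping: entering SMM parks the live vmxon flag in smm.vmxon and clears it, and leaving SMM restores it. A minimal sketch of that save/restore pattern, again on a stripped-down struct:

    #include <stdbool.h>

    struct nested_sketch {
            bool vmxon;                 /* live flag: L1 is in VMX operation */
            struct { bool vmxon; } smm; /* saved copy across SMM transitions */
    };

    /* On SMM entry, VMX operation is suspended: park the flag and clear it. */
    static void enter_smm_sketch(struct nested_sketch *n)
    {
            n->smm.vmxon = n->vmxon;
            n->vmxon = false;
    }

    /* On RSM, restore VMX operation if it was active before SMM entry. */
    static void leave_smm_sketch(struct nested_sketch *n)
    {
            if (n->smm.vmxon) {
                    n->vmxon = true;
                    n->smm.vmxon = false;
            }
    }

The vmx_apic_init_signal_blocked() hit then reads the restored flag: per the check at line 8356, an INIT signal is blocked while L1 is in VMX root operation but not while it is running L2, where INIT causes a VM-exit instead.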