Lines matching refs:root_hpa (KVM x86 MMU)

2131 		 * the active root_hpa is valid here.
2133 BUG_ON(root != vcpu->arch.mmu->root_hpa);
2147 shadow_walk_init_using_root(iterator, vcpu, vcpu->arch.mmu->root_hpa,
2864 if (WARN_ON(!VALID_PAGE(vcpu->arch.mmu->root_hpa)))
3134 static void mmu_free_root_page(struct kvm *kvm, hpa_t *root_hpa,
3139 if (!VALID_PAGE(*root_hpa))
3142 sp = to_shadow_page(*root_hpa & PT64_BASE_ADDR_MASK);
3153 *root_hpa = INVALID_PAGE;
3168 if (!(free_active_root && VALID_PAGE(mmu->root_hpa))) {
3188 mmu_free_root_page(kvm, &mmu->root_hpa, &invalid_list);
3196 mmu->root_hpa = INVALID_PAGE;
3246 vcpu->arch.mmu->root_hpa = root;
3253 vcpu->arch.mmu->root_hpa = root;
3264 vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->pae_root);
3292 MMU_WARN_ON(VALID_PAGE(vcpu->arch.mmu->root_hpa));
3298 vcpu->arch.mmu->root_hpa = root;
3344 vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->pae_root);
3366 vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->lm_root);
3391 if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
3397 hpa_t root = vcpu->arch.mmu->root_hpa;
3525 if (!VALID_PAGE(vcpu->arch.mmu->root_hpa)) {
3530 if (is_tdp_mmu_root(vcpu->kvm, vcpu->arch.mmu->root_hpa))
3716 if (!is_tdp_mmu_root(vcpu->kvm, vcpu->arch.mmu->root_hpa)) {
3743 if (is_tdp_mmu_root(vcpu->kvm, vcpu->arch.mmu->root_hpa))
3842 * If a matching root was found, it is assigned to kvm_mmu->root_hpa and true is
3844 * Otherwise, the LRU root from the cache is assigned to kvm_mmu->root_hpa and
3855 root.hpa = mmu->root_hpa;
3867 mmu->root_hpa = root.hpa;
3926 to_shadow_page(vcpu->arch.mmu->root_hpa));
4809 vcpu->arch.mmu->root_hpa = INVALID_PAGE;
4865 WARN_ON(VALID_PAGE(vcpu->arch.root_mmu.root_hpa));
4867 WARN_ON(VALID_PAGE(vcpu->arch.guest_mmu.root_hpa));
5072 if (WARN_ON(!VALID_PAGE(vcpu->arch.mmu->root_hpa)))
5127 gva_t gva, hpa_t root_hpa)
5143 if (root_hpa == INVALID_PAGE) {
5144 mmu->invlpg(vcpu, gva, mmu->root_hpa);
5161 mmu->invlpg(vcpu, gva, root_hpa);
5182 mmu->invlpg(vcpu, gva, mmu->root_hpa);
5313 mmu->root_hpa = INVALID_PAGE;
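
Taken together, the matches trace one lifecycle for kvm_mmu.root_hpa: the root-allocation paths (3246-3366) store a freshly built root, consumers guard on VALID_PAGE(root_hpa) before touching it (2864, 3168, 3391, 3525, 5072), and mmu_free_root_page() looks up the backing shadow page (3142) and then writes INVALID_PAGE back (3153), which is also the value the init/reset paths start from (4809, 5313). The sketch below is a minimal, self-contained userspace model of that guard/assign/invalidate pattern, not kernel code: hpa_t is redefined locally, the mock_* helpers and the fake 0x1000 root address are stand-ins, and the real mmu_free_root_page() also takes the kvm and invalid_list arguments visible at 3134 and in the call at 3188.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t hpa_t;

/* Same shape as the kernel's INVALID_PAGE / VALID_PAGE helpers. */
#define INVALID_PAGE	(~(hpa_t)0)
#define VALID_PAGE(x)	((x) != INVALID_PAGE)

struct mock_mmu {
	hpa_t root_hpa;			/* active root, or INVALID_PAGE */
};

/* Stand-in for dropping the shadow page that backs a root. */
static void mock_release_root(hpa_t root)
{
	printf("releasing shadow page for root 0x%llx\n",
	       (unsigned long long)root);
}

/*
 * Same control flow as mmu_free_root_page() in the listing: bail out if
 * the slot is already invalid, release the backing page, then reset the
 * slot to INVALID_PAGE so later VALID_PAGE() guards see it as free.
 */
static void mock_free_root_page(hpa_t *root_hpa)
{
	if (!VALID_PAGE(*root_hpa))
		return;

	mock_release_root(*root_hpa);
	*root_hpa = INVALID_PAGE;
}

int main(void)
{
	struct mock_mmu mmu = { .root_hpa = INVALID_PAGE };

	/* Allocation path: a freshly built root is stored into root_hpa. */
	mmu.root_hpa = 0x1000;

	/* Consumers guard on VALID_PAGE() before walking the root. */
	if (VALID_PAGE(mmu.root_hpa))
		printf("walking root 0x%llx\n",
		       (unsigned long long)mmu.root_hpa);

	/* Teardown: free and invalidate; a second call is a no-op. */
	mock_free_root_page(&mmu.root_hpa);
	mock_free_root_page(&mmu.root_hpa);
	return 0;
}

Note that the real function hands the shadow page to the invalid_list machinery seen at 3134/3188 rather than releasing it inline; the mock collapses that into an immediate release to keep the sketch short.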