hv: mmu: add strict check for adding page table mapping

The current implementation only performs the "only add a page table mapping
for a region when it's not already mapped" check when the page table entry is
a PTE. However, the same check is needed for PDPTE and PDE entries as well,
as sketched below.

Tracked-On: #3475
Signed-off-by: Li, Fei1 <fei1.li@intel.com>
Author:    Li, Fei1
Date:      2019-08-21 06:52:54 +08:00
Committer: ACRN System Integration
parent     c691c5bd3c
commit     127c73c3be

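For context, here is a minimal standalone sketch (not ACRN code) of the strict-check pattern described above and applied in the hunks below: refuse to touch an entry that is already present or already maps a large page, and report the conflicting virtual address. The add_entry_strict() helper, PAGE_PRESENT, PDE_PS and the simplified entry layout are illustrative assumptions only; the real code uses mem_ops->pgentry_present(), pde_large()/pdpte_large() and pr_fatal().

#include <stdint.h>
#include <stdio.h>

#define PAGE_PRESENT    (1UL << 0)      /* entry maps something */
#define PDE_PS          (1UL << 7)      /* entry is a large (2 MB / 1 GB) page */

/* Refuse to overwrite an entry that is already in use; report the vaddr. */
static int add_entry_strict(uint64_t *entry, uint64_t paddr, uint64_t prot,
                            uint64_t vaddr, const char *level)
{
        if ((*entry & (PAGE_PRESENT | PDE_PS)) != 0UL) {
                /* Mirrors the new pr_fatal() calls in the diff below. */
                fprintf(stderr, "%s 0x%llx is already present!\n",
                        level, (unsigned long long)vaddr);
                return -1;
        }
        *entry = paddr | prot;
        return 0;
}

int main(void)
{
        uint64_t pde = 0UL;

        /* First mapping succeeds; a second attempt on the same entry is rejected. */
        (void)add_entry_strict(&pde, 0x200000UL, PAGE_PRESENT, 0x200000UL, "pde");
        (void)add_entry_strict(&pde, 0x400000UL, PAGE_PRESENT, 0x200000UL, "pde");
        return 0;
}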

@@ -249,7 +249,7 @@ static void add_pte(const uint64_t *pde, uint64_t paddr_start, uint64_t vaddr_st
                 uint64_t *pte = pt_page + index;
 
                 if (mem_ops->pgentry_present(*pte) != 0UL) {
-                        ASSERT(false, "invalid op, pte present");
+                        pr_fatal("%s, pte 0x%llx is already present!\n", __func__, vaddr);
                 } else {
                         set_pgentry(pte, paddr | prot, mem_ops);
                         paddr += PTE_SIZE;
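
Note: the PTE-level check above already existed; this first hunk only changes how a conflict is reported, replacing the ASSERT with a pr_fatal() that logs the offending virtual address, consistent with the new PDE and PDPTE checks in the hunks below.
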
@@ -280,6 +280,9 @@ static void add_pde(const uint64_t *pdpte, uint64_t paddr_start, uint64_t vaddr_
                 uint64_t *pde = pd_page + index;
                 uint64_t vaddr_next = (vaddr & PDE_MASK) + PDE_SIZE;
 
+                if (pde_large(*pde) != 0UL) {
+                        pr_fatal("%s, pde 0x%llx is already present!\n", __func__, vaddr);
+                } else {
                 if (mem_ops->pgentry_present(*pde) == 0UL) {
                         if (mem_aligned_check(paddr, PDE_SIZE) &&
                                 mem_aligned_check(vaddr, PDE_SIZE) &&
@@ -297,6 +300,7 @@ static void add_pde(const uint64_t *pdpte, uint64_t paddr_start, uint64_t vaddr_
                                 }
                         }
                         add_pte(pde, paddr, vaddr, vaddr_end, prot, mem_ops);
+                }
                 if (vaddr_next >= vaddr_end) {
                         break;  /* done */
                 }
@@ -322,6 +326,9 @@ static void add_pdpte(const uint64_t *pml4e, uint64_t paddr_start, uint64_t vadd
                 uint64_t *pdpte = pdpt_page + index;
                 uint64_t vaddr_next = (vaddr & PDPTE_MASK) + PDPTE_SIZE;
 
+                if (pdpte_large(*pdpte) != 0UL) {
+                        pr_fatal("%s, pdpte 0x%llx is already present!\n", __func__, vaddr);
+                } else {
                 if (mem_ops->pgentry_present(*pdpte) == 0UL) {
                         if (mem_aligned_check(paddr, PDPTE_SIZE) &&
                                 mem_aligned_check(vaddr, PDPTE_SIZE) &&
@@ -339,6 +346,7 @@ static void add_pdpte(const uint64_t *pml4e, uint64_t paddr_start, uint64_t vadd
                                 }
                         }
                         add_pde(pdpte, paddr, vaddr, vaddr_end, prot, mem_ops);
+                }
                 if (vaddr_next >= vaddr_end) {
                         break;  /* done */
                 }
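
With the two new checks in place, add_pde() and add_pdpte() bail out with a pr_fatal() when the target entry already maps a large page, instead of falling through and handing that entry to add_pte()/add_pde() as if it pointed to a lower-level page table, as the previous code would have done.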