Merge from the vmc-playground branch:

Rename the pv_entry_t iterator entry from pv_list to pv_next. Besides being
technically more correct (the old name suggests a list head, while this field
is the per-entry linkage used for iteration), the rename is also needed by the
vm_radix work to avoid a name clash on macro expansions.

Sponsored by:	EMC / Isilon storage division
Reviewed by:	alc, jeff
Tested by:	flo, pho, jhb, davide
commit b38d37f7b5
parent e8aabc79db
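The change is purely a field rename: the TAILQ_HEAD members keep the pv_list name, while the TAILQ_ENTRY inside struct pv_entry becomes pv_next, and every TAILQ_FOREACH/TAILQ_REMOVE/TAILQ_INSERT_TAIL call site passes that linkage name. The sketch below is not part of the commit; it is a minimal userland illustration under assumed, trimmed-down stand-ins for struct pv_entry and struct md_page, using only the standard sys/queue.h macros, to show how the linkage name flows through the queue macro expansions.

#include <stdio.h>
#include <sys/queue.h>

/*
 * Hypothetical stand-ins for the kernel structures, reduced to what the
 * queue macros need.  The TAILQ_ENTRY member is the per-entry linkage
 * that every TAILQ_* call site names; the TAILQ_HEAD member is the list
 * itself, which keeps the pv_list name.
 */
struct pv_entry {
	unsigned long pv_va;			/* virtual address for mapping */
	TAILQ_ENTRY(pv_entry) pv_next;		/* linkage, formerly pv_list */
};

struct md_page {
	TAILQ_HEAD(, pv_entry) pv_list;		/* list head, name unchanged */
};

int
main(void)
{
	struct md_page md;
	struct pv_entry a = { .pv_va = 0x1000 };
	struct pv_entry b = { .pv_va = 0x2000 };
	struct pv_entry *pv;

	TAILQ_INIT(&md.pv_list);
	TAILQ_INSERT_TAIL(&md.pv_list, &a, pv_next);
	TAILQ_INSERT_TAIL(&md.pv_list, &b, pv_next);

	/* The macro expands to accesses of pv->pv_next.tqe_next. */
	TAILQ_FOREACH(pv, &md.pv_list, pv_next)
		printf("va %#lx\n", pv->pv_va);

	TAILQ_REMOVE(&md.pv_list, &a, pv_next);
	return (0);
}

Because the third argument to the TAILQ macros is pasted into member accesses at preprocessing time, giving the linkage a name distinct from any other pv_list identifier or macro keeps those expansions unambiguous, which is presumably the name-clash motivation cited in the commit message.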
@@ -2222,7 +2222,7 @@ reclaim_pv_chunk(pmap_t locked_pmap, struct rwlock **lockp)
 	if ((tpte & PG_A) != 0)
 		vm_page_aflag_set(m, PGA_REFERENCED);
 	CHANGE_PV_LIST_LOCK_TO_VM_PAGE(lockp, m);
-	TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+	TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 	if (TAILQ_EMPTY(&m->md.pv_list) &&
 	    (m->flags & PG_FICTITIOUS) == 0) {
 		pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
@@ -2506,9 +2506,9 @@ pmap_pvh_remove(struct md_page *pvh, pmap_t pmap, vm_offset_t va)
 	pv_entry_t pv;
 
 	rw_assert(&pvh_global_lock, RA_LOCKED);
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		if (pmap == PV_PMAP(pv) && va == pv->pv_va) {
-			TAILQ_REMOVE(&pvh->pv_list, pv, pv_list);
+			TAILQ_REMOVE(&pvh->pv_list, pv, pv_next);
 			break;
 		}
 	}
@@ -2547,7 +2547,7 @@ pmap_pv_demote_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa,
 	pv = pmap_pvh_remove(pvh, pmap, va);
 	KASSERT(pv != NULL, ("pmap_pv_demote_pde: pv not found"));
 	m = PHYS_TO_VM_PAGE(pa);
-	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 	/* Instantiate the remaining NPTEPG - 1 pv entries. */
 	PV_STAT(atomic_add_long(&pv_entry_allocs, NPTEPG - 1));
 	va_last = va + NBPDR - PAGE_SIZE;
@@ -2565,7 +2565,7 @@ pmap_pv_demote_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa,
 		m++;
 		KASSERT((m->oflags & VPO_UNMANAGED) == 0,
 		    ("pmap_pv_demote_pde: page %p is not managed", m));
-		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 		if (va == va_last)
 			goto out;
 	}
@@ -2613,7 +2613,7 @@ pmap_pv_promote_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa,
 	pv = pmap_pvh_remove(&m->md, pmap, va);
 	KASSERT(pv != NULL, ("pmap_pv_promote_pde: pv not found"));
 	pvh = pa_to_pvh(pa);
-	TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_list);
+	TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_next);
 	/* Free the remaining NPTEPG - 1 pv entries. */
 	va_last = va + NBPDR - PAGE_SIZE;
 	do {
@@ -2654,7 +2654,7 @@ pmap_try_insert_pv_entry(pmap_t pmap, vm_offset_t va, vm_page_t m,
 	if ((pv = get_pv_entry(pmap, NULL)) != NULL) {
 		pv->pv_va = va;
 		CHANGE_PV_LIST_LOCK_TO_VM_PAGE(lockp, m);
-		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 		return (TRUE);
 	} else
 		return (FALSE);
@@ -2678,7 +2678,7 @@ pmap_pv_insert_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa,
 		pv->pv_va = va;
 		CHANGE_PV_LIST_LOCK_TO_PHYS(lockp, pa);
 		pvh = pa_to_pvh(pa);
-		TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_next);
 		return (TRUE);
 	} else
 		return (FALSE);
@@ -3157,7 +3157,7 @@ pmap_remove_all(vm_page_t m)
 		vm_page_dirty(m);
 		pmap_unuse_pt(pmap, pv->pv_va, *pde, &free);
 		pmap_invalidate_page(pmap, pv->pv_va);
-		TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+		TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 		free_pv_entry(pmap, pv);
 		PMAP_UNLOCK(pmap);
 	}
@@ -3602,7 +3602,7 @@ pmap_enter(pmap_t pmap, vm_offset_t va, vm_prot_t access, vm_page_t m,
 		pv = get_pv_entry(pmap, &lock);
 		pv->pv_va = va;
 		CHANGE_PV_LIST_LOCK_TO_PHYS(&lock, pa);
-		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 		if ((newpte & PG_RW) != 0)
 			vm_page_aflag_set(m, PGA_WRITEABLE);
 	}
@@ -4295,7 +4295,7 @@ pmap_page_exists_quick(pmap_t pmap, vm_page_t m)
 	rw_rlock(&pvh_global_lock);
 	lock = VM_PAGE_TO_PV_LIST_LOCK(m);
 	rw_rlock(lock);
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		if (PV_PMAP(pv) == pmap) {
 			rv = TRUE;
 			break;
@@ -4306,7 +4306,7 @@ pmap_page_exists_quick(pmap_t pmap, vm_page_t m)
 	}
 	if (!rv && loops < 16 && (m->flags & PG_FICTITIOUS) == 0) {
 		pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-		TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+		TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 			if (PV_PMAP(pv) == pmap) {
 				rv = TRUE;
 				break;
@@ -4358,7 +4358,7 @@ pmap_pvh_wired_mappings(struct md_page *pvh, int count)
 	pv_entry_t pv;
 
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte(pmap, pv->pv_va);
@@ -4489,7 +4489,7 @@ pmap_remove_pages(pmap_t pmap)
 	if ((tpte & PG_PS) != 0) {
 		pmap_resident_count_dec(pmap, NBPDR / PAGE_SIZE);
 		pvh = pa_to_pvh(tpte & PG_PS_FRAME);
-		TAILQ_REMOVE(&pvh->pv_list, pv, pv_list);
+		TAILQ_REMOVE(&pvh->pv_list, pv, pv_next);
 		if (TAILQ_EMPTY(&pvh->pv_list)) {
 			for (mt = m; mt < &m[NBPDR / PAGE_SIZE]; mt++)
 				if ((mt->aflags & PGA_WRITEABLE) != 0 &&
@@ -4508,7 +4508,7 @@ pmap_remove_pages(pmap_t pmap)
 		}
 	} else {
 		pmap_resident_count_dec(pmap, 1);
-		TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+		TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 		if ((m->aflags & PGA_WRITEABLE) != 0 &&
 		    TAILQ_EMPTY(&m->md.pv_list) &&
 		    (m->flags & PG_FICTITIOUS) == 0) {
@@ -4583,7 +4583,7 @@ pmap_is_modified_pvh(struct md_page *pvh)
 
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
 	rv = FALSE;
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte(pmap, pv->pv_va);
@@ -4654,7 +4654,7 @@ pmap_is_referenced_pvh(struct md_page *pvh)
 
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
 	rv = FALSE;
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte(pmap, pv->pv_va);
@@ -4695,7 +4695,7 @@ pmap_remove_write(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		va = pv->pv_va;
@@ -4705,7 +4705,7 @@ pmap_remove_write(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -4758,7 +4758,7 @@ pmap_ts_referenced(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, pvn) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, pvn) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		va = pv->pv_va;
@@ -4792,9 +4792,9 @@ pmap_ts_referenced(vm_page_t m)
 	if ((pv = TAILQ_FIRST(&m->md.pv_list)) != NULL) {
 		pvf = pv;
 		do {
-			pvn = TAILQ_NEXT(pv, pv_list);
-			TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
-			TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+			pvn = TAILQ_NEXT(pv, pv_next);
+			TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
+			TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 			pmap = PV_PMAP(pv);
 			PMAP_LOCK(pmap);
 			pde = pmap_pde(pmap, pv->pv_va);
@@ -4846,7 +4846,7 @@ pmap_clear_modify(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		va = pv->pv_va;
@@ -4878,7 +4878,7 @@ pmap_clear_modify(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -4915,7 +4915,7 @@ pmap_clear_reference(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		va = pv->pv_va;
@@ -4938,7 +4938,7 @@ pmap_clear_reference(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -277,7 +277,7 @@ extern struct pmap kernel_pmap_store;
  */
 typedef struct pv_entry {
 	vm_offset_t pv_va;	/* virtual address for mapping */
-	TAILQ_ENTRY(pv_entry) pv_list;
+	TAILQ_ENTRY(pv_entry) pv_next;
 } *pv_entry_t;
 
 /*
@@ -2286,7 +2286,7 @@ pmap_pv_reclaim(pmap_t locked_pmap)
 	vm_page_dirty(m);
 	if ((tpte & PG_A) != 0)
 		vm_page_aflag_set(m, PGA_REFERENCED);
-	TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+	TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 	if (TAILQ_EMPTY(&m->md.pv_list) &&
 	    (m->flags & PG_FICTITIOUS) == 0) {
 		pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
@@ -2491,9 +2491,9 @@ pmap_pvh_remove(struct md_page *pvh, pmap_t pmap, vm_offset_t va)
 	pv_entry_t pv;
 
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		if (pmap == PV_PMAP(pv) && va == pv->pv_va) {
-			TAILQ_REMOVE(&pvh->pv_list, pv, pv_list);
+			TAILQ_REMOVE(&pvh->pv_list, pv, pv_next);
 			break;
 		}
 	}
@@ -2521,7 +2521,7 @@ pmap_pv_demote_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa)
 	pv = pmap_pvh_remove(pvh, pmap, va);
 	KASSERT(pv != NULL, ("pmap_pv_demote_pde: pv not found"));
 	m = PHYS_TO_VM_PAGE(pa);
-	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 	/* Instantiate the remaining NPTEPG - 1 pv entries. */
 	va_last = va + NBPDR - PAGE_SIZE;
 	do {
@@ -2557,7 +2557,7 @@ pmap_pv_promote_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa)
 	pv = pmap_pvh_remove(&m->md, pmap, va);
 	KASSERT(pv != NULL, ("pmap_pv_promote_pde: pv not found"));
 	pvh = pa_to_pvh(pa);
-	TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_list);
+	TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_next);
 	/* Free the remaining NPTEPG - 1 pv entries. */
 	va_last = va + NBPDR - PAGE_SIZE;
 	do {
@@ -2604,7 +2604,7 @@ pmap_insert_entry(pmap_t pmap, vm_offset_t va, vm_page_t m)
 	PMAP_LOCK_ASSERT(pmap, MA_OWNED);
 	pv = get_pv_entry(pmap, FALSE);
 	pv->pv_va = va;
-	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+	TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 }
 
 /*
@@ -2620,7 +2620,7 @@ pmap_try_insert_pv_entry(pmap_t pmap, vm_offset_t va, vm_page_t m)
 	if (pv_entry_count < pv_entry_high_water &&
 	    (pv = get_pv_entry(pmap, TRUE)) != NULL) {
 		pv->pv_va = va;
-		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 		return (TRUE);
 	} else
 		return (FALSE);
@@ -2640,7 +2640,7 @@ pmap_pv_insert_pde(pmap_t pmap, vm_offset_t va, vm_paddr_t pa)
 	    (pv = get_pv_entry(pmap, TRUE)) != NULL) {
 		pv->pv_va = va;
 		pvh = pa_to_pvh(pa);
-		TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&pvh->pv_list, pv, pv_next);
 		return (TRUE);
 	} else
 		return (FALSE);
@@ -3095,7 +3095,7 @@ pmap_remove_all(vm_page_t m)
 		vm_page_dirty(m);
 		pmap_unuse_pt(pmap, pv->pv_va, &free);
 		pmap_invalidate_page(pmap, pv->pv_va);
-		TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+		TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 		free_pv_entry(pmap, pv);
 		PMAP_UNLOCK(pmap);
 	}
@@ -3550,7 +3550,7 @@ pmap_enter(pmap_t pmap, vm_offset_t va, vm_prot_t access, vm_page_t m,
 		if (pv == NULL)
 			pv = get_pv_entry(pmap, FALSE);
 		pv->pv_va = va;
-		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+		TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 		pa |= PG_MANAGED;
 	} else if (pv != NULL)
 		free_pv_entry(pmap, pv);
@@ -4258,7 +4258,7 @@ pmap_page_exists_quick(pmap_t pmap, vm_page_t m)
 	    ("pmap_page_exists_quick: page %p is not managed", m));
 	rv = FALSE;
 	rw_wlock(&pvh_global_lock);
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		if (PV_PMAP(pv) == pmap) {
 			rv = TRUE;
 			break;
@@ -4269,7 +4269,7 @@ pmap_page_exists_quick(pmap_t pmap, vm_page_t m)
 	}
 	if (!rv && loops < 16 && (m->flags & PG_FICTITIOUS) == 0) {
 		pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-		TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+		TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 			if (PV_PMAP(pv) == pmap) {
 				rv = TRUE;
 				break;
@@ -4321,7 +4321,7 @@ pmap_pvh_wired_mappings(struct md_page *pvh, int count)
 
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
 	sched_pin();
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte_quick(pmap, pv->pv_va);
@@ -4448,7 +4448,7 @@ pmap_remove_pages(pmap_t pmap)
 	if ((tpte & PG_PS) != 0) {
 		pmap->pm_stats.resident_count -= NBPDR / PAGE_SIZE;
 		pvh = pa_to_pvh(tpte & PG_PS_FRAME);
-		TAILQ_REMOVE(&pvh->pv_list, pv, pv_list);
+		TAILQ_REMOVE(&pvh->pv_list, pv, pv_next);
 		if (TAILQ_EMPTY(&pvh->pv_list)) {
 			for (mt = m; mt < &m[NBPDR / PAGE_SIZE]; mt++)
 				if (TAILQ_EMPTY(&mt->md.pv_list))
@@ -4466,7 +4466,7 @@ pmap_remove_pages(pmap_t pmap)
 		}
 	} else {
 		pmap->pm_stats.resident_count--;
-		TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
+		TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
 		if (TAILQ_EMPTY(&m->md.pv_list) &&
 		    (m->flags & PG_FICTITIOUS) == 0) {
 			pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
@@ -4536,7 +4536,7 @@ pmap_is_modified_pvh(struct md_page *pvh)
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
 	rv = FALSE;
 	sched_pin();
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte_quick(pmap, pv->pv_va);
@@ -4609,7 +4609,7 @@ pmap_is_referenced_pvh(struct md_page *pvh)
 	rw_assert(&pvh_global_lock, RA_WLOCKED);
 	rv = FALSE;
 	sched_pin();
-	TAILQ_FOREACH(pv, &pvh->pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &pvh->pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pte = pmap_pte_quick(pmap, pv->pv_va);
@@ -4652,7 +4652,7 @@ pmap_remove_write(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		va = pv->pv_va;
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
@@ -4662,7 +4662,7 @@ pmap_remove_write(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -4721,7 +4721,7 @@ pmap_ts_referenced(vm_page_t m)
 	sched_pin();
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, pvn) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, pvn) {
 		va = pv->pv_va;
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
@@ -4755,9 +4755,9 @@ pmap_ts_referenced(vm_page_t m)
 	if ((pv = TAILQ_FIRST(&m->md.pv_list)) != NULL) {
 		pvf = pv;
 		do {
-			pvn = TAILQ_NEXT(pv, pv_list);
-			TAILQ_REMOVE(&m->md.pv_list, pv, pv_list);
-			TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_list);
+			pvn = TAILQ_NEXT(pv, pv_next);
+			TAILQ_REMOVE(&m->md.pv_list, pv, pv_next);
+			TAILQ_INSERT_TAIL(&m->md.pv_list, pv, pv_next);
 			pmap = PV_PMAP(pv);
 			PMAP_LOCK(pmap);
 			pde = pmap_pde(pmap, pv->pv_va);
@@ -4811,7 +4811,7 @@ pmap_clear_modify(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		va = pv->pv_va;
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
@@ -4848,7 +4848,7 @@ pmap_clear_modify(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -4892,7 +4892,7 @@ pmap_clear_reference(vm_page_t m)
 	if ((m->flags & PG_FICTITIOUS) != 0)
 		goto small_mappings;
 	pvh = pa_to_pvh(VM_PAGE_TO_PHYS(m));
-	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_list, next_pv) {
+	TAILQ_FOREACH_SAFE(pv, &pvh->pv_list, pv_next, next_pv) {
 		va = pv->pv_va;
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
@@ -4915,7 +4915,7 @@ pmap_clear_reference(vm_page_t m)
 		PMAP_UNLOCK(pmap);
 	}
 small_mappings:
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		PMAP_LOCK(pmap);
 		pde = pmap_pde(pmap, pv->pv_va);
@@ -5426,7 +5426,7 @@ pmap_pvdump(vm_paddr_t pa)
 
 	printf("pa %x", pa);
 	m = PHYS_TO_VM_PAGE(pa);
-	TAILQ_FOREACH(pv, &m->md.pv_list, pv_list) {
+	TAILQ_FOREACH(pv, &m->md.pv_list, pv_next) {
 		pmap = PV_PMAP(pv);
 		printf(" -> pmap %p, va %x", (void *)pmap, pv->pv_va);
 		pads(pmap);
@@ -468,7 +468,7 @@ extern struct pmap kernel_pmap_store;
  */
 typedef struct pv_entry {
 	vm_offset_t pv_va;	/* virtual address for mapping */
-	TAILQ_ENTRY(pv_entry) pv_list;
+	TAILQ_ENTRY(pv_entry) pv_next;
 } *pv_entry_t;
 
 /*