Reimplement atomic operations on PDEs and PTEs in pmap.h.  This change
significantly reduces duplicate code and makes it easier to read.

Reviewed by:	alc, bde
Jung-uk Kim	2013-08-21 22:40:29 +00:00
commit 1533b9f714	(parent 274132ac23)

2 changed files with 20 additions and 118 deletions

sys/amd64/include/pmap.h

@@ -206,41 +206,14 @@ extern u_int64_t KPML4phys; /* physical address of kernel level 4 */
 pt_entry_t *vtopte(vm_offset_t);
 #define	vtophys(va)	pmap_kextract(((vm_offset_t) (va)))
 
-static __inline pt_entry_t
-pte_load(pt_entry_t *ptep)
-{
-	pt_entry_t r;
-
-	r = *ptep;
-	return (r);
-}
-
-static __inline pt_entry_t
-pte_load_store(pt_entry_t *ptep, pt_entry_t pte)
-{
-	pt_entry_t r;
-
-	__asm __volatile(
-	    "xchgq %0,%1"
-	    : "=m" (*ptep),
-	      "=r" (r)
-	    : "1" (pte),
-	      "m" (*ptep));
-	return (r);
-}
-
-#define	pte_load_clear(pte)	atomic_readandclear_long(pte)
-
-static __inline void
-pte_store(pt_entry_t *ptep, pt_entry_t pte)
-{
-
-	*ptep = pte;
-}
-
-#define	pte_clear(ptep)		pte_store((ptep), (pt_entry_t)0ULL)
-
-#define	pde_store(pdep, pde)	pte_store((pdep), (pde))
+#define	pte_load_store(ptep, pte)	atomic_swap_long(ptep, pte)
+#define	pte_load_clear(ptep)		atomic_swap_long(ptep, 0)
+#define	pte_store(ptep, pte) do { \
+	*(u_long *)(ptep) = (u_long)(pte); \
+} while (0)
+#define	pte_clear(ptep)			pte_store(ptep, 0)
+
+#define	pde_store(pdep, pde)		pte_store(pdep, pde)
 
 extern pt_entry_t pg_nx;
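
For context, the xchgq wrapper removed above is essentially what atomic_swap_long() provides on amd64: xchg with a memory operand is implicitly locked, so a single instruction swaps in the new PTE and returns the previous one. A minimal sketch of that pattern follows, under a hypothetical example_ name; it is not the verbatim <machine/atomic.h> implementation.

static __inline u_long
example_atomic_swap_long(volatile u_long *p, u_long v)
{

	/* xchg with a memory operand is implicitly locked on x86. */
	__asm __volatile("xchgq %1,%0" : "+m" (*p), "+r" (v));
	return (v);		/* v now holds the previous value of *p */
}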

sys/i386/include/pmap.h

@@ -326,98 +326,27 @@ pmap_kextract(vm_offset_t va)
 
 #if defined(PAE) && !defined(XEN)
 
-#define	pde_cmpset(pdep, old, new) \
-				atomic_cmpset_64((pdep), (old), (new))
-
-static __inline pt_entry_t
-pte_load(pt_entry_t *ptep)
-{
-	pt_entry_t r;
-
-	__asm __volatile(
-	    "lock; cmpxchg8b %1"
-	    : "=A" (r)
-	    : "m" (*ptep), "a" (0), "d" (0), "b" (0), "c" (0));
-	return (r);
-}
-
-static __inline pt_entry_t
-pte_load_store(pt_entry_t *ptep, pt_entry_t v)
-{
-	pt_entry_t r;
-
-	r = *ptep;
-	__asm __volatile(
-	    "1:\n"
-	    "\tlock; cmpxchg8b %1\n"
-	    "\tjnz 1b"
-	    : "+A" (r)
-	    : "m" (*ptep), "b" ((uint32_t)v), "c" ((uint32_t)(v >> 32)));
-	return (r);
-}
-
-/* XXXRU move to atomic.h? */
-static __inline int
-atomic_cmpset_64(volatile uint64_t *dst, uint64_t exp, uint64_t src)
-{
-	int64_t res = exp;
-
-	__asm __volatile (
-	"	lock ;		"
-	"	cmpxchg8b %2 ;	"
-	"	setz	%%al ;	"
-	"	movzbl	%%al,%0 ;	"
-	"# atomic_cmpset_64"
-	: "+A" (res),			/* 0 (result) */
-	  "=m" (*dst)			/* 1 */
-	: "m" (*dst),			/* 2 */
-	  "b" ((uint32_t)src),
-	  "c" ((uint32_t)(src >> 32)));
-
-	return (res);
-}
-
-#define	pte_load_clear(ptep)	pte_load_store((ptep), (pt_entry_t)0ULL)
-
-#define	pte_store(ptep, pte)	pte_load_store((ptep), (pt_entry_t)pte)
+#define	pde_cmpset(pdep, old, new)	atomic_cmpset_64_i586(pdep, old, new)
+#define	pte_load_store(ptep, pte)	atomic_swap_64_i586(ptep, pte)
+#define	pte_load_clear(ptep)		atomic_swap_64_i586(ptep, 0)
+#define	pte_store(ptep, pte)		atomic_store_rel_64_i586(ptep, pte)
 
 extern pt_entry_t pg_nx;
 
-#elif !defined(PAE) && !defined (XEN)
+#elif !defined(PAE) && !defined(XEN)
 
-#define	pde_cmpset(pdep, old, new) \
-				atomic_cmpset_int((pdep), (old), (new))
-
-static __inline pt_entry_t
-pte_load(pt_entry_t *ptep)
-{
-	pt_entry_t r;
-
-	r = *ptep;
-	return (r);
-}
-
-static __inline pt_entry_t
-pte_load_store(pt_entry_t *ptep, pt_entry_t pte)
-{
-	__asm volatile("xchgl %0, %1" : "+m" (*ptep), "+r" (pte));
-	return (pte);
-}
-
-#define	pte_load_clear(pte)	atomic_readandclear_int(pte)
-
-static __inline void
-pte_store(pt_entry_t *ptep, pt_entry_t pte)
-{
-
-	*ptep = pte;
-}
+#define	pde_cmpset(pdep, old, new)	atomic_cmpset_int(pdep, old, new)
+#define	pte_load_store(ptep, pte)	atomic_swap_int(ptep, pte)
+#define	pte_load_clear(ptep)		atomic_swap_int(ptep, 0)
+#define	pte_store(ptep, pte) do { \
+	*(u_int *)(ptep) = (u_int)(pte); \
+} while (0)
 
 #endif /* PAE */
 
-#define	pte_clear(ptep)		pte_store((ptep), (pt_entry_t)0ULL)
+#define	pte_clear(ptep)		pte_store(ptep, 0)
 
-#define	pde_store(pdep, pde)	pte_store((pdep), (pde))
+#define	pde_store(pdep, pde)	pte_store(pdep, pde)
 
 #endif /* _KERNEL */
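
On PAE i386 a PTE is 64 bits wide but the ISA has no 64-bit xchg, so the removed pte_load_store() above, and presumably the atomic_swap_64_i586() helper that replaces it, loops on lock; cmpxchg8b until the comparand matches, which yields an atomic swap. A minimal sketch of that technique follows, under a hypothetical example_ name rather than the real <machine/atomic.h> code.

static __inline uint64_t
example_swap_64_i586(volatile uint64_t *p, uint64_t v)
{
	uint64_t old;

	old = *p;			/* initial comparand guess */
	__asm __volatile(
	    "1:\n"
	    "\tlock; cmpxchg8b %1\n"	/* if (*p == edx:eax) *p = ecx:ebx */
	    "\tjnz 1b"			/* else edx:eax = *p; retry */
	    : "+A" (old), "+m" (*p)
	    : "b" ((uint32_t)v), "c" ((uint32_t)(v >> 32)));
	return (old);			/* previous value of *p */
}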