Support better performance with P6 architectures and in SMP mode.
Unnecessary TLB flushes removed.  More efficient page zeroing on P6
(modify page only if non-zero).
This commit is contained in:
John Dyson 1998-05-11 02:13:47 +00:00
parent f0175db1ee
commit 5498a452bc
9 changed files with 217 additions and 31 deletions

View File

@ -39,7 +39,7 @@
* SUCH DAMAGE.
*
* from: @(#)pmap.c 7.7 (Berkeley) 5/12/91
* $Id: pmap.c,v 1.193 1998/04/19 15:22:48 bde Exp $
* $Id: pmap.c,v 1.194 1998/05/11 01:06:08 dyson Exp $
*/
/*
@ -219,7 +219,7 @@ static vm_page_t _pmap_allocpte __P((pmap_t pmap, unsigned ptepindex));
static unsigned * pmap_pte_quick __P((pmap_t pmap, vm_offset_t va));
static vm_page_t pmap_page_lookup __P((vm_object_t object, vm_pindex_t pindex));
static int pmap_unuse_pt __P((pmap_t, vm_offset_t, vm_page_t));
static vm_offset_t pmap_kmem_choose(vm_offset_t addr) ;
static vm_offset_t pmap_kmem_choose(vm_offset_t addr);
void pmap_collect(void);
static unsigned pdir4mb;
@ -2770,12 +2770,16 @@ pmap_zero_page(phys)
#endif
*(int *) prv_CMAP3 = PG_V | PG_RW | (phys & PG_FRAME) | PG_A | PG_M;
invltlb_1pg((vm_offset_t) &prv_CPAGE3);
cpu_invlpg(&prv_CPAGE3);
bzero(&prv_CPAGE3, PAGE_SIZE);
#if defined(I686_CPU)
if (cpu_class == CPUCLASS_686)
i686_pagezero(&prv_CPAGE3);
else
#endif
bzero(&prv_CPAGE3, PAGE_SIZE);
*(int *) prv_CMAP3 = 0;
invltlb_1pg((vm_offset_t) &prv_CPAGE3);
#else
#if !defined(MAX_PERF)
if (*(int *) CMAP2)
@ -2783,9 +2787,15 @@ pmap_zero_page(phys)
#endif
*(int *) CMAP2 = PG_V | PG_RW | (phys & PG_FRAME) | PG_A | PG_M;
bzero(CADDR2, PAGE_SIZE);
invltlb_1pg(CADDR2);
#if defined(I686_CPU)
if (cpu_class == CPUCLASS_686)
i686_pagezero(CADDR2);
else
#endif
bzero(CADDR2, PAGE_SIZE);
*(int *) CMAP2 = 0;
invltlb_1pg((vm_offset_t) CADDR2);
#endif
}
@ -2811,13 +2821,13 @@ pmap_copy_page(src, dst)
*(int *) prv_CMAP1 = PG_V | (src & PG_FRAME) | PG_A;
*(int *) prv_CMAP2 = PG_V | PG_RW | (dst & PG_FRAME) | PG_A | PG_M;
invltlb_2pg( (vm_offset_t) &prv_CPAGE1, (vm_offset_t) &prv_CPAGE2);
cpu_invlpg(&prv_CPAGE1);
cpu_invlpg(&prv_CPAGE2);
bcopy(&prv_CPAGE1, &prv_CPAGE2, PAGE_SIZE);
*(int *) prv_CMAP1 = 0;
*(int *) prv_CMAP2 = 0;
invltlb_2pg( (vm_offset_t) &prv_CPAGE1, (vm_offset_t) &prv_CPAGE2);
#else
#if !defined(MAX_PERF)
if (*(int *) CMAP1 || *(int *) CMAP2)
@ -2831,7 +2841,6 @@ pmap_copy_page(src, dst)
*(int *) CMAP1 = 0;
*(int *) CMAP2 = 0;
invltlb_2pg( (vm_offset_t) CADDR1, (vm_offset_t) CADDR2);
#endif
}

View File

@ -30,7 +30,7 @@
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: support.s,v 1.57 1997/09/02 20:05:30 bde Exp $
* $Id: support.s,v 1.58 1997/12/14 02:11:09 dyson Exp $
*/
#include "npx.h"
@ -335,6 +335,56 @@ intreg_i586_bzero:
ret
#endif /* I586_CPU && NNPX > 0 */
/*
 * i686_pagezero(addr) -- zero one 4096-byte page, writing only the parts
 * that are not already zero.  On P6-class CPUs this avoids dirtying
 * already-zero cache lines and the resulting write-back traffic (the
 * pmap_zero_page hunks in this commit call it only when
 * cpu_class == CPUCLASS_686).
 *
 * Register use: %edi = scan/store cursor, %ecx = dword count,
 * %eax = 0 (compare/store pattern), %edx = dwords left after a clear.
 */
ENTRY(i686_pagezero)
pushl %edi
pushl %ebx
movl 12(%esp), %edi	/* arg 0: page address (past 2 saved regs + ret) */
movl $1024, %ecx	/* 1024 dwords = one 4096-byte page */
cld			/* scan/store upward */
ALIGN_TEXT
1:
/* Skip forward over dwords that are already zero. */
xorl %eax, %eax
repe
scasl
jnz 2f			/* stopped on a nonzero dword */
/* Remainder of the page was already zero -- nothing written at all. */
popl %ebx
popl %edi
ret
ALIGN_TEXT
2:
/* Step back onto the nonzero dword that scasl advanced past. */
incl %ecx
subl $4, %edi
movl %ecx, %edx		/* %edx = dwords remaining in the page */
cmpl $16, %ecx
jge 3f			/* >= 16 dwords left: clear all of them below */
/*
 * Fewer than 16 dwords remain, i.e. we are inside the last 64-byte
 * block of the page: clear only up to the end of that block.
 * NOTE(review): because of the jge above, a nonzero dword found any
 * earlier in the page clears the ENTIRE remainder in one rep stosl,
 * so the rescan loop (jnz 1b) is reached only from this tail path --
 * confirm that "blast the rest once dirty" is the intended heuristic.
 */
movl %edi, %ebx
andl $0x3f, %ebx	/* byte offset within the 64-byte block */
shrl %ebx
shrl %ebx		/* bytes -> dwords */
movl $16, %ecx
subl %ebx, %ecx		/* dwords up to the end of the block */
3:
subl %ecx, %edx		/* dwords still unprocessed after this clear */
rep
stosl			/* store %ecx zero dwords (%eax is still 0) */
movl %edx, %ecx		/* resume scanning whatever is left */
testl %edx, %edx
jnz 1b
popl %ebx
popl %edi
ret
/* fillw(pat, base, cnt) */
ENTRY(fillw)
pushl %edi

View File

@ -30,7 +30,7 @@
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: support.s,v 1.57 1997/09/02 20:05:30 bde Exp $
* $Id: support.s,v 1.58 1997/12/14 02:11:09 dyson Exp $
*/
#include "npx.h"
@ -335,6 +335,56 @@ intreg_i586_bzero:
ret
#endif /* I586_CPU && NNPX > 0 */
/*
 * i686_pagezero(addr) -- zero one 4096-byte page, writing only the parts
 * that are not already zero.  On P6-class CPUs this avoids dirtying
 * already-zero cache lines and the resulting write-back traffic (the
 * pmap_zero_page hunks in this commit call it only when
 * cpu_class == CPUCLASS_686).
 *
 * Register use: %edi = scan/store cursor, %ecx = dword count,
 * %eax = 0 (compare/store pattern), %edx = dwords left after a clear.
 */
ENTRY(i686_pagezero)
pushl %edi
pushl %ebx
movl 12(%esp), %edi	/* arg 0: page address (past 2 saved regs + ret) */
movl $1024, %ecx	/* 1024 dwords = one 4096-byte page */
cld			/* scan/store upward */
ALIGN_TEXT
1:
/* Skip forward over dwords that are already zero. */
xorl %eax, %eax
repe
scasl
jnz 2f			/* stopped on a nonzero dword */
/* Remainder of the page was already zero -- nothing written at all. */
popl %ebx
popl %edi
ret
ALIGN_TEXT
2:
/* Step back onto the nonzero dword that scasl advanced past. */
incl %ecx
subl $4, %edi
movl %ecx, %edx		/* %edx = dwords remaining in the page */
cmpl $16, %ecx
jge 3f			/* >= 16 dwords left: clear all of them below */
/*
 * Fewer than 16 dwords remain, i.e. we are inside the last 64-byte
 * block of the page: clear only up to the end of that block.
 * NOTE(review): because of the jge above, a nonzero dword found any
 * earlier in the page clears the ENTIRE remainder in one rep stosl,
 * so the rescan loop (jnz 1b) is reached only from this tail path --
 * confirm that "blast the rest once dirty" is the intended heuristic.
 */
movl %edi, %ebx
andl $0x3f, %ebx	/* byte offset within the 64-byte block */
shrl %ebx
shrl %ebx		/* bytes -> dwords */
movl $16, %ecx
subl %ebx, %ecx		/* dwords up to the end of the block */
3:
subl %ecx, %edx		/* dwords still unprocessed after this clear */
rep
stosl			/* store %ecx zero dwords (%eax is still 0) */
movl %edx, %ecx		/* resume scanning whatever is left */
testl %edx, %edx
jnz 1b
popl %ebx
popl %edi
ret
/* fillw(pat, base, cnt) */
ENTRY(fillw)
pushl %edi

View File

@ -30,7 +30,7 @@
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: cpufunc.h,v 1.75 1998/01/25 17:02:00 kato Exp $
* $Id: cpufunc.h,v 1.76 1998/01/25 23:45:41 kato Exp $
*/
/*
@ -219,14 +219,21 @@ invd(void)
void invlpg __P((u_int addr));
void invltlb __P((void));
/*
 * cpu_invlpg: flush the TLB entry for the page containing *addr on the
 * executing CPU only (this is the SMP branch -- see the #else below; no
 * cross-CPU shootdown happens here).  The "m"(*(char *)addr) operand lets
 * the assembler build the invlpg memory operand directly, and the
 * "memory" clobber keeps the compiler from reordering memory accesses
 * around the flush.
 */
static __inline void
cpu_invlpg(void *addr)
{
__asm __volatile("invlpg %0"::"m"(*(char *)addr):"memory");
}
#else /* !SMP */
static __inline void
invlpg(u_int addr)
invlpg(void *addr)
{
__asm __volatile("invlpg (%0)" : : "r" (addr) : "memory");
__asm __volatile("invlpg %0"::"m"(*(char *)addr):"memory");
}
static __inline void
invltlb(void)
{
@ -432,5 +439,6 @@ void ltr __P((u_short sel));
u_int rcr0 __P((void));
u_long rcr3 __P((void));
u_long rcr4 __P((void));
void i686_pagezero __P((void *addr));
#endif /* !_MACHINE_CPUFUNC_H_ */

View File

@ -1,4 +1,4 @@
# $Id: options.i386,v 1.76 1998/03/09 22:09:11 eivind Exp $
# $Id: options.i386,v 1.77 1998/04/18 04:58:01 ahasty Exp $
BOUNCEPAGES opt_bounce.h
DISABLE_PSE
@ -74,6 +74,7 @@ I386_CPU opt_global.h
I486_CPU opt_global.h
I586_CPU opt_global.h
I686_CPU opt_global.h
LINUX opt_global.h
SC_SPLASH_SCREEN opt_syscons.h
MAXCONS opt_syscons.h

View File

@ -1,4 +1,4 @@
# $Id: options.i386,v 1.76 1998/03/09 22:09:11 eivind Exp $
# $Id: options.i386,v 1.77 1998/04/18 04:58:01 ahasty Exp $
BOUNCEPAGES opt_bounce.h
DISABLE_PSE
@ -74,6 +74,7 @@ I386_CPU opt_global.h
I486_CPU opt_global.h
I586_CPU opt_global.h
I686_CPU opt_global.h
LINUX opt_global.h
SC_SPLASH_SCREEN opt_syscons.h
MAXCONS opt_syscons.h

View File

@ -39,7 +39,7 @@
* SUCH DAMAGE.
*
* from: @(#)pmap.c 7.7 (Berkeley) 5/12/91
* $Id: pmap.c,v 1.193 1998/04/19 15:22:48 bde Exp $
* $Id: pmap.c,v 1.194 1998/05/11 01:06:08 dyson Exp $
*/
/*
@ -219,7 +219,7 @@ static vm_page_t _pmap_allocpte __P((pmap_t pmap, unsigned ptepindex));
static unsigned * pmap_pte_quick __P((pmap_t pmap, vm_offset_t va));
static vm_page_t pmap_page_lookup __P((vm_object_t object, vm_pindex_t pindex));
static int pmap_unuse_pt __P((pmap_t, vm_offset_t, vm_page_t));
static vm_offset_t pmap_kmem_choose(vm_offset_t addr) ;
static vm_offset_t pmap_kmem_choose(vm_offset_t addr);
void pmap_collect(void);
static unsigned pdir4mb;
@ -2770,12 +2770,16 @@ pmap_zero_page(phys)
#endif
*(int *) prv_CMAP3 = PG_V | PG_RW | (phys & PG_FRAME) | PG_A | PG_M;
invltlb_1pg((vm_offset_t) &prv_CPAGE3);
cpu_invlpg(&prv_CPAGE3);
bzero(&prv_CPAGE3, PAGE_SIZE);
#if defined(I686_CPU)
if (cpu_class == CPUCLASS_686)
i686_pagezero(&prv_CPAGE3);
else
#endif
bzero(&prv_CPAGE3, PAGE_SIZE);
*(int *) prv_CMAP3 = 0;
invltlb_1pg((vm_offset_t) &prv_CPAGE3);
#else
#if !defined(MAX_PERF)
if (*(int *) CMAP2)
@ -2783,9 +2787,15 @@ pmap_zero_page(phys)
#endif
*(int *) CMAP2 = PG_V | PG_RW | (phys & PG_FRAME) | PG_A | PG_M;
bzero(CADDR2, PAGE_SIZE);
invltlb_1pg(CADDR2);
#if defined(I686_CPU)
if (cpu_class == CPUCLASS_686)
i686_pagezero(CADDR2);
else
#endif
bzero(CADDR2, PAGE_SIZE);
*(int *) CMAP2 = 0;
invltlb_1pg((vm_offset_t) CADDR2);
#endif
}
@ -2811,13 +2821,13 @@ pmap_copy_page(src, dst)
*(int *) prv_CMAP1 = PG_V | (src & PG_FRAME) | PG_A;
*(int *) prv_CMAP2 = PG_V | PG_RW | (dst & PG_FRAME) | PG_A | PG_M;
invltlb_2pg( (vm_offset_t) &prv_CPAGE1, (vm_offset_t) &prv_CPAGE2);
cpu_invlpg(&prv_CPAGE1);
cpu_invlpg(&prv_CPAGE2);
bcopy(&prv_CPAGE1, &prv_CPAGE2, PAGE_SIZE);
*(int *) prv_CMAP1 = 0;
*(int *) prv_CMAP2 = 0;
invltlb_2pg( (vm_offset_t) &prv_CPAGE1, (vm_offset_t) &prv_CPAGE2);
#else
#if !defined(MAX_PERF)
if (*(int *) CMAP1 || *(int *) CMAP2)
@ -2831,7 +2841,6 @@ pmap_copy_page(src, dst)
*(int *) CMAP1 = 0;
*(int *) CMAP2 = 0;
invltlb_2pg( (vm_offset_t) CADDR1, (vm_offset_t) CADDR2);
#endif
}

View File

@ -30,7 +30,7 @@
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: support.s,v 1.57 1997/09/02 20:05:30 bde Exp $
* $Id: support.s,v 1.58 1997/12/14 02:11:09 dyson Exp $
*/
#include "npx.h"
@ -335,6 +335,56 @@ intreg_i586_bzero:
ret
#endif /* I586_CPU && NNPX > 0 */
/*
 * i686_pagezero(addr) -- zero one 4096-byte page, writing only the parts
 * that are not already zero.  On P6-class CPUs this avoids dirtying
 * already-zero cache lines and the resulting write-back traffic (the
 * pmap_zero_page hunks in this commit call it only when
 * cpu_class == CPUCLASS_686).
 *
 * Register use: %edi = scan/store cursor, %ecx = dword count,
 * %eax = 0 (compare/store pattern), %edx = dwords left after a clear.
 */
ENTRY(i686_pagezero)
pushl %edi
pushl %ebx
movl 12(%esp), %edi	/* arg 0: page address (past 2 saved regs + ret) */
movl $1024, %ecx	/* 1024 dwords = one 4096-byte page */
cld			/* scan/store upward */
ALIGN_TEXT
1:
/* Skip forward over dwords that are already zero. */
xorl %eax, %eax
repe
scasl
jnz 2f			/* stopped on a nonzero dword */
/* Remainder of the page was already zero -- nothing written at all. */
popl %ebx
popl %edi
ret
ALIGN_TEXT
2:
/* Step back onto the nonzero dword that scasl advanced past. */
incl %ecx
subl $4, %edi
movl %ecx, %edx		/* %edx = dwords remaining in the page */
cmpl $16, %ecx
jge 3f			/* >= 16 dwords left: clear all of them below */
/*
 * Fewer than 16 dwords remain, i.e. we are inside the last 64-byte
 * block of the page: clear only up to the end of that block.
 * NOTE(review): because of the jge above, a nonzero dword found any
 * earlier in the page clears the ENTIRE remainder in one rep stosl,
 * so the rescan loop (jnz 1b) is reached only from this tail path --
 * confirm that "blast the rest once dirty" is the intended heuristic.
 */
movl %edi, %ebx
andl $0x3f, %ebx	/* byte offset within the 64-byte block */
shrl %ebx
shrl %ebx		/* bytes -> dwords */
movl $16, %ecx
subl %ebx, %ecx		/* dwords up to the end of the block */
3:
subl %ecx, %edx		/* dwords still unprocessed after this clear */
rep
stosl			/* store %ecx zero dwords (%eax is still 0) */
movl %edx, %ecx		/* resume scanning whatever is left */
testl %edx, %edx
jnz 1b
popl %ebx
popl %edi
ret
/* fillw(pat, base, cnt) */
ENTRY(fillw)
pushl %edi

View File

@ -30,7 +30,7 @@
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $Id: cpufunc.h,v 1.75 1998/01/25 17:02:00 kato Exp $
* $Id: cpufunc.h,v 1.76 1998/01/25 23:45:41 kato Exp $
*/
/*
@ -219,14 +219,21 @@ invd(void)
void invlpg __P((u_int addr));
void invltlb __P((void));
/*
 * cpu_invlpg: flush the TLB entry for the page containing *addr on the
 * executing CPU only (this is the SMP branch -- see the #else below; no
 * cross-CPU shootdown happens here).  The "m"(*(char *)addr) operand lets
 * the assembler build the invlpg memory operand directly, and the
 * "memory" clobber keeps the compiler from reordering memory accesses
 * around the flush.
 */
static __inline void
cpu_invlpg(void *addr)
{
__asm __volatile("invlpg %0"::"m"(*(char *)addr):"memory");
}
#else /* !SMP */
static __inline void
invlpg(u_int addr)
invlpg(void *addr)
{
__asm __volatile("invlpg (%0)" : : "r" (addr) : "memory");
__asm __volatile("invlpg %0"::"m"(*(char *)addr):"memory");
}
static __inline void
invltlb(void)
{
@ -432,5 +439,6 @@ void ltr __P((u_short sel));
u_int rcr0 __P((void));
u_long rcr3 __P((void));
u_long rcr4 __P((void));
void i686_pagezero __P((void *addr));
#endif /* !_MACHINE_CPUFUNC_H_ */