Split the low level trap code into trap, interrupt and syscall; it's

easier and hopefully this code is done changing radically.

Don't use the mmu tlb register to address the kernel page table, nor
the 8k pointer register.  The hardware will do some of the page table
lookup by storing the base address in an internal register and
calculating the address of the tte in the table.  However it is limited
to a 1 meg tsb, which only maps 512 megs.  The kernel page table only
has one level, so it's easy to just do it by hand, which has the advantage
of supporting arbitrary amounts of kvm and only costs a few more instructions.

Increase kvm to 1 gig now that it's easy to do so and so we don't waste
most of a 4 meg page.

Fix some traces.  Fix more proc locking.

Call tsb_stte_promote if we get a soft fault on a mapping in the upper
levels of the tsb.  If there is an invalid or unreferenced mapping
in the primary tsb, it will be replaced.

Immediately fail for faults occurring in {f,s}uswintr.
This commit is contained in:
Jake Burkholder 2001-09-30 19:41:20 +00:00
parent 15b39a57a5
commit e5e8823f37
5 changed files with 699 additions and 243 deletions

View File

@ -35,6 +35,7 @@
#define IQ_SIZE (NPIL * 2)
#define IQ_MASK (IQ_SIZE - 1)
#define IH_SHIFT 3
#define IQE_SHIFT 5
#define IV_SHIFT 5

View File

@ -125,7 +125,7 @@
* tl bit allows us to detect both ranges with one test.
*
* This is:
* (((%tpc - %tba) >> 5) & ~0x200) >= 0x80 && <= 0xff
* 0x80 <= (((%tpc - %tba) >> 5) & ~0x200) <= 0xff
*
* Values outside of the trap table will produce negative or large positive
* results.
@ -314,10 +314,9 @@ ENTRY(tl0_kstack_fixup)
wrpr %o0, 0, %otherwin
wrpr %g0, 0, %canrestore
ldx [PCPU(CURTHREAD)], %o0
ldx [%o0 + TD_KSTACK], %o0
set KSTACK_PAGES * PAGE_SIZE - SPOFF - CCFSZ, %o1
ldx [%o0 + TD_PCB], %o0
retl
add %o0, %o1, %sp
sub %o0, SPOFF + CCFSZ, %sp
END(tl0_kstack_fixup)
/*
@ -392,23 +391,11 @@ END(tl0_sfsr_trap)
.macro tl0_intr level, mask
tl0_kstack
set \mask, %o2
b %xcc, tl0_intr_call_trap
b %xcc, tl0_intr
mov \level, %o1
.align 32
.endm
/*
* Actually call tl0_trap, and do some work that cannot be done in tl0_intr
* because of space constraints.
*/
ENTRY(tl0_intr_call_trap)
wr %o2, 0, %asr21
rdpr %pil, %o2
wrpr %g0, %o1, %pil
b %xcc, tl0_trap
mov T_INTR, %o0
END(tl0_intr_call_trap)
#define INTR(level, traplvl) \
tl ## traplvl ## _intr level, 1 << level
@ -902,6 +889,13 @@ END(tl0_sftrap)
.endr
.endm
.macro tl0_syscall
tl0_kstack
rdpr %pil, %o0
b %xcc, tl0_syscall
mov T_SYSCALL, %o0
.endm
.macro tl0_soft count
.rept \count
tl0_gen T_SOFT
@ -999,14 +993,14 @@ ENTRY(tl1_sfsr_trap)
mov %g1, %o0
END(tl1_align_trap)
.macro tl1_intr level, mask, type
.macro tl1_intr level, mask
tl1_kstack
rdpr %pil, %o2
wrpr %g0, \level, %pil
set \mask, %o3
wr %o3, 0, %asr21
mov T_INTR | T_KERNEL, %o0
b %xcc, tl1_trap
b %xcc, tl1_intr
mov \level, %o1
.align 32
.endm
@ -1026,6 +1020,7 @@ ENTRY(intr_enqueue)
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g2
stx %g2, [%g1 + KTR_PARM2]
rdpr %tl, %g2
@ -1173,21 +1168,17 @@ END(intr_enqueue)
ldxa [%g0] ASI_DMMU_TAG_TARGET_REG, %g1
srlx %g1, TT_CTX_SHIFT, %g2
brnz,pn %g2, tl1_dmmu_miss_user
ldxa [%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g2
EMPTY
/*
* Convert the tte pointer to an stte pointer, and add extra bits to
* accomodate for large tsb.
*/
sllx %g2, STTE_SHIFT - TTE_SHIFT, %g2
#ifdef notyet
mov AA_DMMU_TAR, %g3
ldxa [%g3] ASI_DMMU, %g3
srlx %g3, TSB_1M_STTE_SHIFT, %g3
and %g3, TSB_KERNEL_MASK >> TSB_1M_STTE_SHIFT, %g3
sllx %g3, TSB_1M_STTE_SHIFT, %g3
add %g2, %g3, %g2
#endif
set TSB_KERNEL_MASK, %g3
set TSB_KERNEL_MIN_ADDRESS, %g4
wr %g0, ASI_DMMU, %asi
ldxa [%g0 + AA_DMMU_TAR] %asi, %g2
srlx %g2, PAGE_SHIFT, %g2
and %g2, %g3, %g2
sllx %g2, STTE_SHIFT, %g2
add %g2, %g4, %g2
/*
* Load the tte, check that it's valid and that the tags match.
@ -1217,13 +1208,11 @@ END(intr_enqueue)
*/
2: wrpr %g0, PSTATE_ALT, %pstate
wr %g0, ASI_DMMU, %asi
ldxa [%g0 + AA_DMMU_TAR] %asi, %g1
tl1_kstack
sub %sp, MF_SIZEOF, %sp
stx %g1, [%sp + SPOFF + CCFSZ + MF_TAR]
wrpr %g0, PSTATE_ALT, %pstate
rdpr %pil, %o2
add %sp, SPOFF + CCFSZ, %o1
b %xcc, tl1_trap
@ -1481,7 +1470,8 @@ ENTRY(tl1_spill_topcb)
stx %g2, [%g6 + 8]
stx %g3, [%g6 + 16]
ldx [PCPU(CURPCB)], %g1
ldx [PCPU(CURTHREAD)], %g1
ldx [%g1 + TD_PCB], %g1
ldx [%g1 + PCB_NSAVED], %g2
sllx %g2, 3, %g3
@ -1501,7 +1491,8 @@ ENTRY(tl1_spill_topcb)
rdpr %tpc, %g2
stx %g2, [%g1 + KTR_PARM1]
stx %sp, [%g1 + KTR_PARM2]
ldx [PCPU(CURPCB)], %g2
ldx [PCPU(CURTHREAD)], %g2
ldx [%g2 + TD_PCB], %g2
ldx [%g2 + PCB_NSAVED], %g2
stx %g2, [%g1 + KTR_PARM3]
9:
@ -1648,7 +1639,8 @@ tl0_breakpoint:
tl0_gen T_BREAKPOINT ! 0x101 breakpoint
tl0_soft 6 ! 0x102-0x107 trap instruction
tl0_soft 1 ! 0x108 SVr4 syscall
tl0_gen T_SYSCALL ! 0x109 BSD syscall
tl0_bsd_syscall:
tl0_syscall ! 0x109 BSD syscall
tl0_soft 118 ! 0x110-0x17f trap instruction
tl0_reserved 128 ! 0x180-0x1ff reserved
@ -1818,8 +1810,70 @@ ENTRY(tl0_trap)
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_trap: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_trap: td=%p type=%#x pil=%#lx pc=%#lx sp=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
stx %o0, [%g1 + KTR_PARM2]
stx %o2, [%g1 + KTR_PARM3]
rdpr %tpc, %g2
stx %g2, [%g1 + KTR_PARM4]
stx %i6, [%g1 + KTR_PARM5]
9:
#endif
stx %i0, [%sp + SPOFF + CCFSZ + TF_O0]
stx %i1, [%sp + SPOFF + CCFSZ + TF_O1]
stx %i2, [%sp + SPOFF + CCFSZ + TF_O2]
stx %i3, [%sp + SPOFF + CCFSZ + TF_O3]
stx %i4, [%sp + SPOFF + CCFSZ + TF_O4]
stx %i5, [%sp + SPOFF + CCFSZ + TF_O5]
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
.Ltl0_trap_spill:
call trap
add %sp, CCFSZ + SPOFF, %o0
b,a %xcc, tl0_ret
nop
END(tl0_trap)
ENTRY(tl0_syscall)
/*
* Force kernel store order.
*/
wrpr %g0, PSTATE_ALT, %pstate
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %o3, [%sp + SPOFF + CCFSZ + TF_WSTATE]
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7 /* set up the normal %g7 */
wrpr %g0, PSTATE_KERNEL, %pstate
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_SYSC
CATR(KTR_SYSC, "tl0_syscall: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
@ -1839,20 +1893,97 @@ ENTRY(tl0_trap)
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
.Ltl0_trap_spill:
call trap
call syscall
add %sp, CCFSZ + SPOFF, %o0
/* Fallthough. */
END(tl0_trap)
b,a %xcc, tl0_ret
nop
END(tl0_syscall)
/* Return to tl0 (user process). */
ENTRY(tl0_ret)
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: td=%p (%s) pil=%#lx sflag=%#x"
ENTRY(tl0_intr)
wr %o2, 0, %asr21
rdpr %pil, %o2
wrpr %g0, %o1, %pil
mov T_INTR, %o0
/*
* Force kernel store order.
*/
wrpr %g0, PSTATE_ALT, %pstate
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %o3, [%sp + SPOFF + CCFSZ + TF_WSTATE]
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7 /* set up the normal %g7 */
wrpr %g0, PSTATE_KERNEL, %pstate
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl0_intr: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
stx %o0, [%g1 + KTR_PARM2]
stx %o1, [%g1 + KTR_PARM3]
stx %o2, [%g1 + KTR_PARM4]
stx %o3, [%g1 + KTR_PARM5]
9:
#endif
stx %i0, [%sp + SPOFF + CCFSZ + TF_O0]
stx %i1, [%sp + SPOFF + CCFSZ + TF_O1]
stx %i2, [%sp + SPOFF + CCFSZ + TF_O2]
stx %i3, [%sp + SPOFF + CCFSZ + TF_O3]
stx %i4, [%sp + SPOFF + CCFSZ + TF_O4]
stx %i5, [%sp + SPOFF + CCFSZ + TF_O5]
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
set cnt+V_INTR, %l0
lduw [%l0], %l1
1: add %l1, 1, %l2
casa [%l0] ASI_N, %l1, %l2
cmp %l1, %l2
bne,pn %xcc, 1b
mov %l2, %l1
set intr_handlers, %l0
sllx %o1, IH_SHIFT, %l1
add %l0, %l1, %l0
ldx [%l0 + IH_FUNC], %l1
call %l1
add %sp, CCFSZ + SPOFF, %o0
b,a %xcc, tl0_ret
nop
END(tl0_intr)
/* Return to tl0 (user process). */
ENTRY(tl0_ret)
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: check ast td=%p (%s) pil=%#lx sflag=%#x"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g3
stx %g3, [%g1 + KTR_PARM2]
rdpr %pil, %g3
@ -1863,19 +1994,19 @@ ENTRY(tl0_ret)
#endif
wrpr %g0, PIL_TICK, %pil
ldx [PCPU(CURTHREAD)], %o0
ldx [%o0 + TD_KSE], %o0
lduw [%o0 + KE_FLAGS], %o1
and %o1, KEF_ASTPENDING | KEF_NEEDRESCHED, %o1
brz,pt %o1, 1f
ldx [PCPU(CURTHREAD)], %l0
ldx [%l0 + TD_KSE], %l1
lduw [%l1 + KE_FLAGS], %l2
and %l2, KEF_ASTPENDING | KEF_NEEDRESCHED, %l2
brz,pt %l2, 1f
nop
call ast
add %sp, CCFSZ + SPOFF, %o0
1: ldx [PCPU(CURPCB)], %o0
ldx [%o0 + PCB_NSAVED], %o1
1: ldx [%l0 + TD_PCB], %l1
ldx [%l1 + PCB_NSAVED], %l2
mov T_SPILL, %o0
brnz,a,pn %o1, .Ltl0_trap_spill
brnz,a,pn %l2, .Ltl0_trap_spill
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1
@ -1928,17 +2059,17 @@ ENTRY(tl0_ret)
restore
tl0_ret_fill:
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: return td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
, %g2, %g3, %g4, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g3
stx %g3, [%g2 + KTR_PARM1]
stx %l0, [%g2 + KTR_PARM2]
rdpr %tstate, %g3
stx %g3, [%g2 + KTR_PARM2]
rdpr %tpc, %g3
stx %g3, [%g2 + KTR_PARM3]
stx %sp, [%g2 + KTR_PARM4]
stx %g1, [%g2 + KTR_PARM5]
rdpr %tpc, %g3
stx %g3, [%g2 + KTR_PARM4]
stx %sp, [%g2 + KTR_PARM5]
9:
#endif
@ -1946,8 +2077,8 @@ tl0_ret_fill:
retry
tl0_ret_fill_end:
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: fill magic wstate=%#lx sp=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: fill magic wstate=%#lx sp=%#lx"
, %l0, %l1, %l2, 7, 8, 9)
stx %l4, [%l0 + KTR_PARM1]
stx %sp, [%l0 + KTR_PARM2]
@ -1955,7 +2086,7 @@ tl0_ret_fill_end:
#endif
/*
* The fill failed and magic has been preformed. Call trap again,
* The fill failed and magic has been performed. Call trap again,
* which will copyin the window on the user's behalf.
*/
wrpr %l4, 0, %wstate
@ -1986,20 +2117,13 @@ ENTRY(tl1_trap)
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
#if KTR_COMPILE & KTR_CT1
setx trap_mask, %l4, %l3
andn %o1, T_KERNEL, %l4
mov 1, %l5
sllx %l5, %l4, %l4
ldx [%l3], %l5
and %l4, %l5, %l4
brz %l4, 9f
nop
CATR(KTR_CT1, "tl1_trap: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl1_trap: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
#if 0
ldx [%l4 + TD_PROC], %l4
add %l4, P_COMM, %l4
stx %l4, [%l3 + KTR_PARM2]
#else
@ -2054,17 +2178,8 @@ ENTRY(tl1_trap)
wrpr %l2, 0, %tpc
wrpr %l3, 0, %tnpc
#if KTR_COMPILE & KTR_CT1
ldx [%sp + SPOFF + CCFSZ + TF_TYPE], %l5
andn %l5, T_KERNEL, %l4
mov 1, %l5
sllx %l5, %l4, %l4
setx trap_mask, %l4, %l3
ldx [%l3], %l5
and %l4, %l5, %l4
brz %l4, 9f
nop
CATR(KTR_CT1, "tl1_trap: return td=%p pil=%#lx sp=%#lx pc=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl1_trap: return td=%p pil=%#lx sp=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
@ -2078,17 +2193,116 @@ ENTRY(tl1_trap)
retry
END(tl1_trap)
ENTRY(tl1_intr)
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl1_intr: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
#if 0
ldx [%l4 + TD_PROC], %l4
add %l4, P_COMM, %l4
stx %l4, [%l3 + KTR_PARM2]
#else
stx %o2, [%l3 + KTR_PARM2]
#endif
andn %o0, T_KERNEL, %l4
stx %l4, [%l3 + KTR_PARM3]
stx %o1, [%l3 + KTR_PARM4]
stx %l1, [%l3 + KTR_PARM5]
9:
#endif
wrpr %g0, 1, %tl
/* We may have trapped before %g7 was set up correctly. */
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7
wrpr %g0, PSTATE_KERNEL, %pstate
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
set cnt+V_INTR, %l0
lduw [%l0], %l1
1: add %l1, 1, %l2
casa [%l0] ASI_N, %l1, %l2
cmp %l1, %l2
bne,pn %xcc, 1b
mov %l2, %l1
set intr_handlers, %l0
sllx %o1, IH_SHIFT, %l1
add %l0, %l1, %l0
ldx [%l0 + IH_FUNC], %l1
call %l1
add %sp, CCFSZ + SPOFF, %o0
ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1
ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2
ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3
ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4
ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5
ldx [%sp + SPOFF + CCFSZ + TF_G6], %g6
ldx [%sp + SPOFF + CCFSZ + TF_PIL], %l0
ldx [%sp + SPOFF + CCFSZ + TF_TSTATE], %l1
ldx [%sp + SPOFF + CCFSZ + TF_TPC], %l2
ldx [%sp + SPOFF + CCFSZ + TF_TNPC], %l3
wrpr %g0, PSTATE_ALT, %pstate
wrpr %l0, 0, %pil
wrpr %g0, 2, %tl
wrpr %l1, 0, %tstate
wrpr %l2, 0, %tpc
wrpr %l3, 0, %tnpc
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl1_intr: return td=%p pil=%#lx sp=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
stx %l0, [%l3 + KTR_PARM2]
stx %sp, [%l3 + KTR_PARM3]
stx %l2, [%l3 + KTR_PARM4]
9:
#endif
restore
retry
END(tl1_intr)
/*
* Freshly forked processes come here when switched to for the first time.
* The arguments to fork_exit() have been setup in the locals, we must move
* them to the outs.
*/
ENTRY(fork_trampoline)
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "fork_trampoline: td=%p (%s) cwp=%#lx"
#if KTR_COMPILE & KTR_PROC
CATR(KTR_PROC, "fork_trampoline: td=%p (%s) cwp=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g2
stx %g2, [%g1 + KTR_PARM2]
rdpr %cwp, %g2
@ -2101,4 +2315,5 @@ ENTRY(fork_trampoline)
call fork_exit
nop
b,a %xcc, tl0_ret
nop
END(fork_trampoline)

View File

@ -125,7 +125,7 @@
* tl bit allows us to detect both ranges with one test.
*
* This is:
* (((%tpc - %tba) >> 5) & ~0x200) >= 0x80 && <= 0xff
* 0x80 <= (((%tpc - %tba) >> 5) & ~0x200) <= 0xff
*
* Values outside of the trap table will produce negative or large positive
* results.
@ -314,10 +314,9 @@ ENTRY(tl0_kstack_fixup)
wrpr %o0, 0, %otherwin
wrpr %g0, 0, %canrestore
ldx [PCPU(CURTHREAD)], %o0
ldx [%o0 + TD_KSTACK], %o0
set KSTACK_PAGES * PAGE_SIZE - SPOFF - CCFSZ, %o1
ldx [%o0 + TD_PCB], %o0
retl
add %o0, %o1, %sp
sub %o0, SPOFF + CCFSZ, %sp
END(tl0_kstack_fixup)
/*
@ -392,23 +391,11 @@ END(tl0_sfsr_trap)
.macro tl0_intr level, mask
tl0_kstack
set \mask, %o2
b %xcc, tl0_intr_call_trap
b %xcc, tl0_intr
mov \level, %o1
.align 32
.endm
/*
* Actually call tl0_trap, and do some work that cannot be done in tl0_intr
* because of space constraints.
*/
ENTRY(tl0_intr_call_trap)
wr %o2, 0, %asr21
rdpr %pil, %o2
wrpr %g0, %o1, %pil
b %xcc, tl0_trap
mov T_INTR, %o0
END(tl0_intr_call_trap)
#define INTR(level, traplvl) \
tl ## traplvl ## _intr level, 1 << level
@ -902,6 +889,13 @@ END(tl0_sftrap)
.endr
.endm
.macro tl0_syscall
tl0_kstack
rdpr %pil, %o0
b %xcc, tl0_syscall
mov T_SYSCALL, %o0
.endm
.macro tl0_soft count
.rept \count
tl0_gen T_SOFT
@ -999,14 +993,14 @@ ENTRY(tl1_sfsr_trap)
mov %g1, %o0
END(tl1_align_trap)
.macro tl1_intr level, mask, type
.macro tl1_intr level, mask
tl1_kstack
rdpr %pil, %o2
wrpr %g0, \level, %pil
set \mask, %o3
wr %o3, 0, %asr21
mov T_INTR | T_KERNEL, %o0
b %xcc, tl1_trap
b %xcc, tl1_intr
mov \level, %o1
.align 32
.endm
@ -1026,6 +1020,7 @@ ENTRY(intr_enqueue)
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g2
stx %g2, [%g1 + KTR_PARM2]
rdpr %tl, %g2
@ -1173,21 +1168,17 @@ END(intr_enqueue)
ldxa [%g0] ASI_DMMU_TAG_TARGET_REG, %g1
srlx %g1, TT_CTX_SHIFT, %g2
brnz,pn %g2, tl1_dmmu_miss_user
ldxa [%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g2
EMPTY
/*
* Convert the tte pointer to an stte pointer, and add extra bits to
* accomodate for large tsb.
*/
sllx %g2, STTE_SHIFT - TTE_SHIFT, %g2
#ifdef notyet
mov AA_DMMU_TAR, %g3
ldxa [%g3] ASI_DMMU, %g3
srlx %g3, TSB_1M_STTE_SHIFT, %g3
and %g3, TSB_KERNEL_MASK >> TSB_1M_STTE_SHIFT, %g3
sllx %g3, TSB_1M_STTE_SHIFT, %g3
add %g2, %g3, %g2
#endif
set TSB_KERNEL_MASK, %g3
set TSB_KERNEL_MIN_ADDRESS, %g4
wr %g0, ASI_DMMU, %asi
ldxa [%g0 + AA_DMMU_TAR] %asi, %g2
srlx %g2, PAGE_SHIFT, %g2
and %g2, %g3, %g2
sllx %g2, STTE_SHIFT, %g2
add %g2, %g4, %g2
/*
* Load the tte, check that it's valid and that the tags match.
@ -1217,13 +1208,11 @@ END(intr_enqueue)
*/
2: wrpr %g0, PSTATE_ALT, %pstate
wr %g0, ASI_DMMU, %asi
ldxa [%g0 + AA_DMMU_TAR] %asi, %g1
tl1_kstack
sub %sp, MF_SIZEOF, %sp
stx %g1, [%sp + SPOFF + CCFSZ + MF_TAR]
wrpr %g0, PSTATE_ALT, %pstate
rdpr %pil, %o2
add %sp, SPOFF + CCFSZ, %o1
b %xcc, tl1_trap
@ -1481,7 +1470,8 @@ ENTRY(tl1_spill_topcb)
stx %g2, [%g6 + 8]
stx %g3, [%g6 + 16]
ldx [PCPU(CURPCB)], %g1
ldx [PCPU(CURTHREAD)], %g1
ldx [%g1 + TD_PCB], %g1
ldx [%g1 + PCB_NSAVED], %g2
sllx %g2, 3, %g3
@ -1501,7 +1491,8 @@ ENTRY(tl1_spill_topcb)
rdpr %tpc, %g2
stx %g2, [%g1 + KTR_PARM1]
stx %sp, [%g1 + KTR_PARM2]
ldx [PCPU(CURPCB)], %g2
ldx [PCPU(CURTHREAD)], %g2
ldx [%g2 + TD_PCB], %g2
ldx [%g2 + PCB_NSAVED], %g2
stx %g2, [%g1 + KTR_PARM3]
9:
@ -1648,7 +1639,8 @@ tl0_breakpoint:
tl0_gen T_BREAKPOINT ! 0x101 breakpoint
tl0_soft 6 ! 0x102-0x107 trap instruction
tl0_soft 1 ! 0x108 SVr4 syscall
tl0_gen T_SYSCALL ! 0x109 BSD syscall
tl0_bsd_syscall:
tl0_syscall ! 0x109 BSD syscall
tl0_soft 118 ! 0x110-0x17f trap instruction
tl0_reserved 128 ! 0x180-0x1ff reserved
@ -1818,8 +1810,70 @@ ENTRY(tl0_trap)
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_trap: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_trap: td=%p type=%#x pil=%#lx pc=%#lx sp=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
stx %o0, [%g1 + KTR_PARM2]
stx %o2, [%g1 + KTR_PARM3]
rdpr %tpc, %g2
stx %g2, [%g1 + KTR_PARM4]
stx %i6, [%g1 + KTR_PARM5]
9:
#endif
stx %i0, [%sp + SPOFF + CCFSZ + TF_O0]
stx %i1, [%sp + SPOFF + CCFSZ + TF_O1]
stx %i2, [%sp + SPOFF + CCFSZ + TF_O2]
stx %i3, [%sp + SPOFF + CCFSZ + TF_O3]
stx %i4, [%sp + SPOFF + CCFSZ + TF_O4]
stx %i5, [%sp + SPOFF + CCFSZ + TF_O5]
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
.Ltl0_trap_spill:
call trap
add %sp, CCFSZ + SPOFF, %o0
b,a %xcc, tl0_ret
nop
END(tl0_trap)
ENTRY(tl0_syscall)
/*
* Force kernel store order.
*/
wrpr %g0, PSTATE_ALT, %pstate
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %o3, [%sp + SPOFF + CCFSZ + TF_WSTATE]
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7 /* set up the normal %g7 */
wrpr %g0, PSTATE_KERNEL, %pstate
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_SYSC
CATR(KTR_SYSC, "tl0_syscall: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
@ -1839,20 +1893,97 @@ ENTRY(tl0_trap)
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
.Ltl0_trap_spill:
call trap
call syscall
add %sp, CCFSZ + SPOFF, %o0
/* Fallthough. */
END(tl0_trap)
b,a %xcc, tl0_ret
nop
END(tl0_syscall)
/* Return to tl0 (user process). */
ENTRY(tl0_ret)
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: td=%p (%s) pil=%#lx sflag=%#x"
ENTRY(tl0_intr)
wr %o2, 0, %asr21
rdpr %pil, %o2
wrpr %g0, %o1, %pil
mov T_INTR, %o0
/*
* Force kernel store order.
*/
wrpr %g0, PSTATE_ALT, %pstate
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %o3, [%sp + SPOFF + CCFSZ + TF_WSTATE]
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7 /* set up the normal %g7 */
wrpr %g0, PSTATE_KERNEL, %pstate
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
stx %g7, [%sp + SPOFF + CCFSZ + TF_G7]
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl0_intr: td=%p type=%#x arg=%#lx pil=%#lx ws=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
stx %o0, [%g1 + KTR_PARM2]
stx %o1, [%g1 + KTR_PARM3]
stx %o2, [%g1 + KTR_PARM4]
stx %o3, [%g1 + KTR_PARM5]
9:
#endif
stx %i0, [%sp + SPOFF + CCFSZ + TF_O0]
stx %i1, [%sp + SPOFF + CCFSZ + TF_O1]
stx %i2, [%sp + SPOFF + CCFSZ + TF_O2]
stx %i3, [%sp + SPOFF + CCFSZ + TF_O3]
stx %i4, [%sp + SPOFF + CCFSZ + TF_O4]
stx %i5, [%sp + SPOFF + CCFSZ + TF_O5]
stx %i6, [%sp + SPOFF + CCFSZ + TF_O6]
stx %i7, [%sp + SPOFF + CCFSZ + TF_O7]
set cnt+V_INTR, %l0
lduw [%l0], %l1
1: add %l1, 1, %l2
casa [%l0] ASI_N, %l1, %l2
cmp %l1, %l2
bne,pn %xcc, 1b
mov %l2, %l1
set intr_handlers, %l0
sllx %o1, IH_SHIFT, %l1
add %l0, %l1, %l0
ldx [%l0 + IH_FUNC], %l1
call %l1
add %sp, CCFSZ + SPOFF, %o0
b,a %xcc, tl0_ret
nop
END(tl0_intr)
/* Return to tl0 (user process). */
ENTRY(tl0_ret)
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: check ast td=%p (%s) pil=%#lx sflag=%#x"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g3
stx %g3, [%g1 + KTR_PARM2]
rdpr %pil, %g3
@ -1863,19 +1994,19 @@ ENTRY(tl0_ret)
#endif
wrpr %g0, PIL_TICK, %pil
ldx [PCPU(CURTHREAD)], %o0
ldx [%o0 + TD_KSE], %o0
lduw [%o0 + KE_FLAGS], %o1
and %o1, KEF_ASTPENDING | KEF_NEEDRESCHED, %o1
brz,pt %o1, 1f
ldx [PCPU(CURTHREAD)], %l0
ldx [%l0 + TD_KSE], %l1
lduw [%l1 + KE_FLAGS], %l2
and %l2, KEF_ASTPENDING | KEF_NEEDRESCHED, %l2
brz,pt %l2, 1f
nop
call ast
add %sp, CCFSZ + SPOFF, %o0
1: ldx [PCPU(CURPCB)], %o0
ldx [%o0 + PCB_NSAVED], %o1
1: ldx [%l0 + TD_PCB], %l1
ldx [%l1 + PCB_NSAVED], %l2
mov T_SPILL, %o0
brnz,a,pn %o1, .Ltl0_trap_spill
brnz,a,pn %l2, .Ltl0_trap_spill
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1
@ -1928,17 +2059,17 @@ ENTRY(tl0_ret)
restore
tl0_ret_fill:
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: return td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
, %g2, %g3, %g4, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g3
stx %g3, [%g2 + KTR_PARM1]
stx %l0, [%g2 + KTR_PARM2]
rdpr %tstate, %g3
stx %g3, [%g2 + KTR_PARM2]
rdpr %tpc, %g3
stx %g3, [%g2 + KTR_PARM3]
stx %sp, [%g2 + KTR_PARM4]
stx %g1, [%g2 + KTR_PARM5]
rdpr %tpc, %g3
stx %g3, [%g2 + KTR_PARM4]
stx %sp, [%g2 + KTR_PARM5]
9:
#endif
@ -1946,8 +2077,8 @@ tl0_ret_fill:
retry
tl0_ret_fill_end:
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "tl0_ret: fill magic wstate=%#lx sp=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl0_ret: fill magic wstate=%#lx sp=%#lx"
, %l0, %l1, %l2, 7, 8, 9)
stx %l4, [%l0 + KTR_PARM1]
stx %sp, [%l0 + KTR_PARM2]
@ -1955,7 +2086,7 @@ tl0_ret_fill_end:
#endif
/*
* The fill failed and magic has been preformed. Call trap again,
* The fill failed and magic has been performed. Call trap again,
* which will copyin the window on the user's behalf.
*/
wrpr %l4, 0, %wstate
@ -1986,20 +2117,13 @@ ENTRY(tl1_trap)
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
#if KTR_COMPILE & KTR_CT1
setx trap_mask, %l4, %l3
andn %o1, T_KERNEL, %l4
mov 1, %l5
sllx %l5, %l4, %l4
ldx [%l3], %l5
and %l4, %l5, %l4
brz %l4, 9f
nop
CATR(KTR_CT1, "tl1_trap: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl1_trap: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
#if 0
ldx [%l4 + TD_PROC], %l4
add %l4, P_COMM, %l4
stx %l4, [%l3 + KTR_PARM2]
#else
@ -2054,17 +2178,8 @@ ENTRY(tl1_trap)
wrpr %l2, 0, %tpc
wrpr %l3, 0, %tnpc
#if KTR_COMPILE & KTR_CT1
ldx [%sp + SPOFF + CCFSZ + TF_TYPE], %l5
andn %l5, T_KERNEL, %l4
mov 1, %l5
sllx %l5, %l4, %l4
setx trap_mask, %l4, %l3
ldx [%l3], %l5
and %l4, %l5, %l4
brz %l4, 9f
nop
CATR(KTR_CT1, "tl1_trap: return td=%p pil=%#lx sp=%#lx pc=%#lx"
#if KTR_COMPILE & KTR_TRAP
CATR(KTR_TRAP, "tl1_trap: return td=%p pil=%#lx sp=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
@ -2078,17 +2193,116 @@ ENTRY(tl1_trap)
retry
END(tl1_trap)
ENTRY(tl1_intr)
sub %sp, TF_SIZEOF, %sp
rdpr %tstate, %l0
stx %l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
rdpr %tpc, %l1
stx %l1, [%sp + SPOFF + CCFSZ + TF_TPC]
rdpr %tnpc, %l2
stx %l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl1_intr: td=%p pil=%#lx type=%#lx arg=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
#if 0
ldx [%l4 + TD_PROC], %l4
add %l4, P_COMM, %l4
stx %l4, [%l3 + KTR_PARM2]
#else
stx %o2, [%l3 + KTR_PARM2]
#endif
andn %o0, T_KERNEL, %l4
stx %l4, [%l3 + KTR_PARM3]
stx %o1, [%l3 + KTR_PARM4]
stx %l1, [%l3 + KTR_PARM5]
9:
#endif
wrpr %g0, 1, %tl
/* We may have trapped before %g7 was set up correctly. */
mov %g7, %l0
wrpr %g0, PSTATE_NORMAL, %pstate
mov %l0, %g7
wrpr %g0, PSTATE_KERNEL, %pstate
stx %o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
stx %o1, [%sp + SPOFF + CCFSZ + TF_ARG]
stx %o2, [%sp + SPOFF + CCFSZ + TF_PIL]
stx %g1, [%sp + SPOFF + CCFSZ + TF_G1]
stx %g2, [%sp + SPOFF + CCFSZ + TF_G2]
stx %g3, [%sp + SPOFF + CCFSZ + TF_G3]
stx %g4, [%sp + SPOFF + CCFSZ + TF_G4]
stx %g5, [%sp + SPOFF + CCFSZ + TF_G5]
stx %g6, [%sp + SPOFF + CCFSZ + TF_G6]
set cnt+V_INTR, %l0
lduw [%l0], %l1
1: add %l1, 1, %l2
casa [%l0] ASI_N, %l1, %l2
cmp %l1, %l2
bne,pn %xcc, 1b
mov %l2, %l1
set intr_handlers, %l0
sllx %o1, IH_SHIFT, %l1
add %l0, %l1, %l0
ldx [%l0 + IH_FUNC], %l1
call %l1
add %sp, CCFSZ + SPOFF, %o0
ldx [%sp + SPOFF + CCFSZ + TF_G1], %g1
ldx [%sp + SPOFF + CCFSZ + TF_G2], %g2
ldx [%sp + SPOFF + CCFSZ + TF_G3], %g3
ldx [%sp + SPOFF + CCFSZ + TF_G4], %g4
ldx [%sp + SPOFF + CCFSZ + TF_G5], %g5
ldx [%sp + SPOFF + CCFSZ + TF_G6], %g6
ldx [%sp + SPOFF + CCFSZ + TF_PIL], %l0
ldx [%sp + SPOFF + CCFSZ + TF_TSTATE], %l1
ldx [%sp + SPOFF + CCFSZ + TF_TPC], %l2
ldx [%sp + SPOFF + CCFSZ + TF_TNPC], %l3
wrpr %g0, PSTATE_ALT, %pstate
wrpr %l0, 0, %pil
wrpr %g0, 2, %tl
wrpr %l1, 0, %tstate
wrpr %l2, 0, %tpc
wrpr %l3, 0, %tnpc
#if KTR_COMPILE & KTR_INTR
CATR(KTR_INTR, "tl1_intr: return td=%p pil=%#lx sp=%#lx pc=%#lx"
, %l3, %l4, %l5, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %l4
stx %l4, [%l3 + KTR_PARM1]
stx %l0, [%l3 + KTR_PARM2]
stx %sp, [%l3 + KTR_PARM3]
stx %l2, [%l3 + KTR_PARM4]
9:
#endif
restore
retry
END(tl1_intr)
/*
* Freshly forked processes come here when switched to for the first time.
* The arguments to fork_exit() have been setup in the locals, we must move
* them to the outs.
*/
ENTRY(fork_trampoline)
#if KTR_COMPILE & KTR_CT1
CATR(KTR_CT1, "fork_trampoline: td=%p (%s) cwp=%#lx"
#if KTR_COMPILE & KTR_PROC
CATR(KTR_PROC, "fork_trampoline: td=%p (%s) cwp=%#lx"
, %g1, %g2, %g3, 7, 8, 9)
ldx [PCPU(CURTHREAD)], %g2
stx %g2, [%g1 + KTR_PARM1]
ldx [%g2 + TD_PROC], %g2
add %g2, P_COMM, %g2
stx %g2, [%g1 + KTR_PARM2]
rdpr %cwp, %g2
@ -2101,4 +2315,5 @@ ENTRY(fork_trampoline)
call fork_exit
nop
b,a %xcc, tl0_ret
nop
END(fork_trampoline)

View File

@ -34,6 +34,7 @@
#include <sys/proc.h>
#include <sys/queue.h>
#include <sys/user.h>
#include <sys/vmmeter.h>
#include <vm/vm.h>
#include <vm/vm_param.h>
@ -60,17 +61,11 @@
ASSYM(KERNBASE, KERNBASE);
/*
* XXX: gas, as of version 2.11.2, does not know this ASI (and some other
* UltraSparc specific ones). This definition will probably get us into trouble
* as soon as they are added.
*/
ASSYM(ASI_BLK_S, ASI_BLK_S);
ASSYM(EFAULT, EFAULT);
ASSYM(ENAMETOOLONG, ENAMETOOLONG);
ASSYM(KSTACK_PAGES, KSTACK_PAGES);
ASSYM(KSTACK_GUARD_PAGES, KSTACK_GUARD_PAGES);
ASSYM(UAREA_PAGES, UAREA_PAGES);
ASSYM(PAGE_SIZE, PAGE_SIZE);
@ -89,15 +84,16 @@ ASSYM(TSB_KERNEL_MIN_ADDRESS, TSB_KERNEL_MIN_ADDRESS);
ASSYM(TSB_PRIMARY_MASK_WIDTH, TSB_MASK_WIDTH);
ASSYM(TSB_PRIMARY_STTE_MASK, TSB_PRIMARY_STTE_MASK);
ASSYM(TSB_PRIMARY_STTE_SHIFT, TSB_PRIMARY_STTE_SHIFT);
ASSYM(TSB_1M_STTE_SHIFT, TSB_1M_STTE_SHIFT);
ASSYM(TSB_KERNEL_MASK, TSB_KERNEL_MASK);
ASSYM(PAGE_SHIFT, PAGE_SHIFT);
ASSYM(PAGE_MASK, PAGE_MASK);
ASSYM(KTR_COMPILE, KTR_COMPILE);
ASSYM(KTR_TRAP, KTR_TRAP);
ASSYM(KTR_SYSC, KTR_SYSC);
ASSYM(KTR_INTR, KTR_INTR);
ASSYM(KTR_CT1, KTR_CT1);
ASSYM(KTR_CT2, KTR_CT2);
ASSYM(KTR_SIZEOF, sizeof(struct ktr_entry));
ASSYM(KTR_DESC, offsetof(struct ktr_entry, ktr_desc));
@ -125,6 +121,8 @@ ASSYM(TT_VA_MASK, TT_VA_MASK);
ASSYM(TT_VA_SHIFT, TT_VA_SHIFT);
ASSYM(TT_CTX_SHIFT, TT_CTX_SHIFT);
ASSYM(V_INTR, offsetof(struct vmmeter, v_intr));
ASSYM(GD_CURTHREAD, offsetof(struct globaldata, gd_curthread));
ASSYM(GD_CURPCB, offsetof(struct globaldata, gd_curpcb));
ASSYM(GD_CPUID, offsetof(struct globaldata, gd_cpuid));
@ -132,6 +130,9 @@ ASSYM(GD_CPUID, offsetof(struct globaldata, gd_cpuid));
ASSYM(GD_IQ, offsetof(struct globaldata, gd_iq));
ASSYM(GD_IVT, offsetof(struct globaldata, gd_ivt));
ASSYM(IH_SHIFT, IH_SHIFT);
ASSYM(IH_FUNC, offsetof(struct intr_handler, ih_func));
ASSYM(IQ_MASK, IQ_MASK);
ASSYM(IQ_HEAD, offsetof(struct intr_queue, iq_head));
ASSYM(IQ_TAIL, offsetof(struct intr_queue, iq_tail));
@ -167,6 +168,7 @@ ASSYM(TD_KSTACK, offsetof(struct thread, td_kstack));
ASSYM(TD_PCB, offsetof(struct thread, td_pcb));
ASSYM(TD_PROC, offsetof(struct thread, td_proc));
ASSYM(PCB_SIZEOF, sizeof(struct pcb));
ASSYM(PCB_FPSTATE, offsetof(struct pcb, pcb_fpstate));
ASSYM(PCB_FP, offsetof(struct pcb, pcb_fp));
ASSYM(PCB_PC, offsetof(struct pcb, pcb_pc));
@ -177,8 +179,6 @@ ASSYM(PCB_NSAVED, offsetof(struct pcb, pcb_nsaved));
ASSYM(PCB_RWSP, offsetof(struct pcb, pcb_rwsp));
ASSYM(PCB_RW, offsetof(struct pcb, pcb_rw));
ASSYM(PCB_CWP_EMPTY, PCB_CWP_EMPTY);
ASSYM(VM_PMAP, offsetof(struct vmspace, vm_pmap));
ASSYM(PM_CONTEXT, offsetof(struct pmap, pm_context));
ASSYM(PM_STTE, offsetof(struct pmap, pm_stte));

View File

@ -77,10 +77,12 @@
void trap(struct trapframe *tf);
int trap_mmu_fault(struct thread *td, struct trapframe *tf);
void syscall(struct thread *td, struct trapframe *tf, u_int sticks);
void syscall(struct trapframe *tf);
u_long trap_mask = 0xffffffffffffffffL & ~(1 << T_INTR);
extern char fsbail[];
extern char *syscallnames[];
const char *trap_msg[] = {
@ -123,40 +125,33 @@ const char *trap_msg[] = {
void
trap(struct trapframe *tf)
{
u_int sticks;
struct thread *td;
struct proc *p;
u_int sticks;
int error;
int ucode;
int mask;
int type;
int sig;
int mask;
KASSERT(PCPU_GET(curthread) != NULL, ("trap: curthread NULL"));
KASSERT(PCPU_GET(curpcb) != NULL, ("trap: curpcb NULL"));
KASSERT(PCPU_GET(curthread)->td_kse != NULL, ("trap: curkse NULL"));
KASSERT(PCPU_GET(curthread)->td_proc != NULL, ("trap: curproc NULL"));
atomic_add_int(&cnt.v_trap, 1);
error = 0;
td = PCPU_GET(curthread);
p = td->td_proc;
error = 0;
type = tf->tf_type;
ucode = type; /* XXX */
sticks = 0;
#if KTR_COMPILE & KTR_TRAP
if (trap_mask & (1 << (type & ~T_KERNEL))) {
CTR5(KTR_TRAP, "trap: %s type=%s (%s) ws=%#lx ow=%#lx",
p->p_comm, trap_msg[type & ~T_KERNEL],
((type & T_KERNEL) ? "kernel" : "user"),
rdpr(wstate), rdpr(otherwin));
}
#endif
if (type == T_SYSCALL)
cnt.v_syscall++;
else if ((type & ~T_KERNEL) == T_INTR)
cnt.v_intr++;
else
cnt.v_trap++;
CTR5(KTR_TRAP, "trap: %s type=%s (%s) ws=%#lx ow=%#lx",
p->p_comm, trap_msg[type & ~T_KERNEL],
((type & T_KERNEL) ? "kernel" : "user"),
rdpr(wstate), rdpr(otherwin));
if ((type & T_KERNEL) == 0) {
sticks = td->td_kse->ke_sticks;
@ -164,6 +159,10 @@ trap(struct trapframe *tf)
}
switch (type) {
/*
* User Mode Traps
*/
case T_ALIGN:
case T_ALIGN_LDDF:
case T_ALIGN_STDF:
@ -198,19 +197,22 @@ trap(struct trapframe *tf)
sig = error;
goto trapsig;
case T_FILL:
if (rwindow_load(td, tf, 2))
if (rwindow_load(td, tf, 2)) {
PROC_LOCK(p);
sigexit(td, SIGILL);
/* Not reached. */
}
goto out;
case T_FILL_RET:
if (rwindow_load(td, tf, 1))
if (rwindow_load(td, tf, 1)) {
PROC_LOCK(p);
sigexit(td, SIGILL);
/* Not reached. */
}
goto out;
case T_INSN_ILLEGAL:
sig = SIGILL;
goto trapsig;
case T_INTR:
intr_dispatch(tf->tf_arg, tf);
goto out;
case T_PRIV_ACTION:
case T_PRIV_OPCODE:
sig = SIGBUS;
@ -219,16 +221,19 @@ trap(struct trapframe *tf)
sig = SIGILL;
goto trapsig;
case T_SPILL:
if (rwindow_save(td))
if (rwindow_save(td)) {
PROC_LOCK(p);
sigexit(td, SIGILL);
goto out;
case T_SYSCALL:
/* syscall() calls userret(), so we need goto out; */
syscall(td, tf, sticks);
/* Not reached. */
}
goto out;
case T_TAG_OVFLW:
sig = SIGEMT;
goto trapsig;
/*
* Kernel Mode Traps
*/
#ifdef DDB
case T_BREAKPOINT | T_KERNEL:
if (kdb_trap(tf) != 0)
@ -241,9 +246,6 @@ trap(struct trapframe *tf)
if (error == 0)
goto out;
break;
case T_INTR | T_KERNEL:
intr_dispatch(tf->tf_arg, tf);
goto out;
case T_WATCH_VIRT | T_KERNEL:
/*
* At the moment, just print the information from the trap,
@ -296,11 +298,7 @@ trapsig:
user:
userret(td, tf, sticks);
out:
#if KTR_COMPILE & KTR_TRAP
if (trap_mask & (1 << (type & ~T_KERNEL))) {
CTR1(KTR_TRAP, "trap: p=%p return", p);
}
#endif
CTR1(KTR_TRAP, "trap: td=%p return", td);
return;
}
@ -322,14 +320,16 @@ trap_mmu_fault(struct thread *td, struct trapframe *tf)
int rv;
p = td->td_proc;
KASSERT(td->td_pcb != NULL, ("trap_dmmu_miss: pcb NULL"));
KASSERT(p->p_vmspace != NULL, ("trap_dmmu_miss: vmspace NULL"));
rv = KERN_SUCCESS;
mf = (struct mmuframe *)tf->tf_arg;
ctx = TLB_TAR_CTX(mf->mf_tar);
pcb = PCPU_GET(curpcb);
pcb = td->td_pcb;
type = tf->tf_type & ~T_KERNEL;
va = TLB_TAR_VA(mf->mf_tar);
stp = NULL;
CTR4(KTR_TRAP, "trap_mmu_fault: td=%p pm_ctx=%#lx va=%#lx ctx=%#lx",
td, p->p_vmspace->vm_pmap.pm_context, va, ctx);
@ -358,7 +358,8 @@ trap_mmu_fault(struct thread *td, struct trapframe *tf)
tlb_store(TLB_DTLB, va, ctx, tte);
}
} else if (tf->tf_type & T_KERNEL &&
(td->td_intr_nesting_level != 0 || pcb->pcb_onfault == NULL)) {
(td->td_intr_nesting_level != 0 || pcb->pcb_onfault == NULL ||
pcb->pcb_onfault == fsbail)) {
rv = KERN_FAILURE;
} else {
mtx_lock(&Giant);
@ -392,17 +393,28 @@ trap_mmu_fault(struct thread *td, struct trapframe *tf)
PROC_LOCK(p);
--p->p_lock;
PROC_UNLOCK(p);
} else if (type == T_IMMU_MISS) {
if ((stp->st_tte.tte_data & TD_EXEC) == 0)
rv = KERN_FAILURE;
else
} else {
stp = tsb_stte_promote(pm, va, stp);
stp->st_tte.tte_data |= TD_REF;
switch (type) {
case T_IMMU_MISS:
if ((stp->st_tte.tte_data & TD_EXEC) == 0) {
rv = KERN_FAILURE;
break;
}
tlb_store(TLB_DTLB | TLB_ITLB, va, ctx,
stp->st_tte);
} else if (type == T_DMMU_PROT &&
(stp->st_tte.tte_data & TD_SW) == 0) {
rv = KERN_FAILURE;
} else {
tlb_store(TLB_DTLB, va, ctx, stp->st_tte);
break;
case T_DMMU_PROT:
if ((stp->st_tte.tte_data & TD_SW) == 0) {
rv = KERN_FAILURE;
break;
}
/* Fallthrough. */
case T_DMMU_MISS:
tlb_store(TLB_DTLB, va, ctx, stp->st_tte);
break;
}
}
mtx_unlock(&Giant);
}
@ -428,25 +440,39 @@ trap_mmu_fault(struct thread *td, struct trapframe *tf)
* in %g1 (and also saved in the trap frame).
*/
void
syscall(struct thread *td, struct trapframe *tf, u_int sticks)
syscall(struct trapframe *tf)
{
struct sysent *callp;
struct thread *td;
register_t args[8];
register_t *argp;
struct proc *p;
u_int sticks;
u_long code;
u_long tpc;
int reg;
int regcnt;
int narg;
int error;
register_t args[8];
register_t *argp;
KASSERT(PCPU_GET(curthread) != NULL, ("trap: curthread NULL"));
KASSERT(PCPU_GET(curthread)->td_kse != NULL, ("trap: curkse NULL"));
KASSERT(PCPU_GET(curthread)->td_proc != NULL, ("trap: curproc NULL"));
atomic_add_int(&cnt.v_syscall, 1);
td = PCPU_GET(curthread);
p = td->td_proc;
narg = 0;
error = 0;
reg = 0;
regcnt = REG_MAXARGS;
sticks = td->td_kse->ke_sticks;
td->td_frame = tf;
code = tf->tf_global[1];
p = td->td_proc;
/*
* For syscalls, we don't want to retry the faulting instruction
* (usually), instead we need to advance one instruction.
@ -588,5 +614,4 @@ bad:
#endif
mtx_assert(&sched_lock, MA_NOTOWNED);
mtx_assert(&Giant, MA_NOTOWNED);
}