Make sure to check for pending interrupts on the final STI. For this

reorganise the IPL vectors a bit so that they can be used from more
places than spllower and doreti.
Include patch from Kazushi Marukawa (fixed to handle pending interrupts),
which should fix the read_psl() == 0 assertion failure reported by
several users.
This commit is contained in:
bouyer 2007-06-28 20:50:30 +00:00
parent c82e20912d
commit 6be5ba1e96
3 changed files with 77 additions and 14 deletions

View File

@ -1,4 +1,4 @@
/* $NetBSD: locore.S,v 1.25 2007/05/17 14:51:35 yamt Exp $ */
/* $NetBSD: locore.S,v 1.26 2007/06/28 20:50:30 bouyer Exp $ */
/* NetBSD: locore.S,v 1.31 2004/08/26 10:12:33 junyoung Exp */
/*
@ -663,6 +663,7 @@ switch_skipsave:
* Switch to newlwp's stack.
*/
CLI(%ebx)
movl L_ADDR(%edi),%ebx
movl PCB_EBP(%ebx),%ebp
movl PCB_ESP(%ebx),%esp
@ -690,6 +691,14 @@ switch_skipsave:
jne check_ras
switch_return:
STIC(%ebx)
jz 1f
call _C_LABEL(stipending)
testl %eax,%eax
jz 1f
pushl CPUVAR(ILEVEL)
call _C_LABEL(Xspllower) # process pending interrupts
1:
movl %esi,%eax # return 'oldlwp'
popl %edi
popl %esi
@ -780,9 +789,29 @@ syscall1:
call _C_LABEL(trap)
addl $4,%esp
jmp .Lsyscall_checkast
1: STI(%eax)
CHECK_DEFERRED_SWITCH(%eax)
1: CHECK_DEFERRED_SWITCH(%eax)
jnz 9f
STIC(%eax)
jz 14f
call _C_LABEL(stipending)
testl %eax,%eax
jz 14f
/* process pending interrupts */
CLI(%eax)
movl CPUVAR(ILEVEL), %ebx
movl $.Lsyscall_resume, %esi # address to resume loop at
.Lsyscall_resume:
movl %ebx,%eax # get cpl
movl CPUVAR(IUNMASK)(,%eax,4),%eax
andl CPUVAR(IPENDING),%eax # any non-masked bits left?
jz 17f
bsrl %eax,%eax
btrl %eax,CPUVAR(IPENDING)
movl CPUVAR(ISOURCES)(,%eax,4),%eax
jmp *IS_RESUME(%eax)
17: movl %ebx, CPUVAR(ILEVEL) #restore cpl
jmp .Lsyscall_checkast
14:
#ifndef DIAGNOSTIC
INTRFASTEXIT
#else /* DIAGNOSTIC */
@ -801,7 +830,8 @@ syscall1:
5: .asciz "WARNING: SPL NOT ZERO ON SYSCALL ENTRY\n"
6: .asciz "WARNING: WANT PMAPLOAD ON SYSCALL ENTRY\n"
#endif /* DIAGNOSTIC */
9: call _C_LABEL(pmap_load)
9: STI(%eax)
call _C_LABEL(pmap_load)
jmp .Lsyscall_checkast /* re-check ASTs */
#if NNPX > 0

View File

@ -1,4 +1,4 @@
/* $NetBSD: spl.S,v 1.9 2007/06/25 20:09:34 bouyer Exp $ */
/* $NetBSD: spl.S,v 1.10 2007/06/28 20:50:30 bouyer Exp $ */
/* NetBSD: spl.S,v 1.11 2004/02/20 17:35:01 yamt Exp */
/*
@ -144,8 +144,6 @@ IDTVEC(spllower)
* called with interrupt disabled.
*/
IDTVEC(doreti)
IDEPTH_DECR
popl %ebx # get previous priority
.Ldoreti_resume:
movl $.Ldoreti_resume,%esi # address to resume loop at
movl %ebx,%eax

View File

@ -1,4 +1,4 @@
/* $NetBSD: vector.S,v 1.18 2007/06/25 20:09:34 bouyer Exp $ */
/* $NetBSD: vector.S,v 1.19 2007/06/28 20:50:31 bouyer Exp $ */
/* NetBSD: 1.13 2004/03/11 11:39:26 yamt Exp */
/*
@ -194,8 +194,8 @@ IDTVEC(recurse_/**/name/**/num) ;\
subl $4,%esp ;\
pushl $T_ASTFLT /* trap # for doing ASTs */ ;\
INTRENTRY ;\
movl $_C_LABEL(Xdoreti), %esi; /* we now have a trap frame, so loop using doreti instead */ ;\
IDTVEC(resume_/**/name/**/num) \
/*movl %esp,%ecx*/ ;\
movl $IREENT_MAGIC,TF_ERR(%esp) ;\
pushl %ebx ;\
movl CPUVAR(ISOURCES) + (num) * 4, %ebp ;\
@ -216,7 +216,9 @@ IDTVEC(resume_/**/name/**/num) \
CLI(%eax) ;\
unmask(num) /* unmask it in hardware */ ;\
late_ack(num) ;\
jmp _C_LABEL(Xdoreti) /* lower spl and do ASTs */ ;\
IDEPTH_DECR ;\
popl %ebx ;\
jmp *%esi /* lower spl and do ASTs */ ;\
# Just unmasking the event isn't enough, we also need to
# reassert the event pending bit if needed. For now just call
@ -578,6 +580,7 @@ calltrap:
pushl %esp
call _C_LABEL(trap)
addl $4,%esp
.Lalltraps_checkusr:
testb $CHK_UPL,TF_CS(%esp)
jnz .Lalltraps_checkast
#ifdef VM86
@ -604,8 +607,24 @@ calltrap:
6: STIC(%eax)
jz 4f
call _C_LABEL(stipending)
#testl %eax,%eax /* XXXcl */
#jnz 1b
testl %eax,%eax
jz 4f
/* process pending interrupts */
CLI(%eax)
movl CPUVAR(ILEVEL), %ebx
movl $.Lalltraps_resume, %esi # address to resume loop at
.Lalltraps_resume:
movl %ebx,%eax # get cpl
movl CPUVAR(IUNMASK)(,%eax,4),%eax
andl CPUVAR(IPENDING),%eax # any non-masked bits left?
jz 7f
bsrl %eax,%eax
btrl %eax,CPUVAR(IPENDING)
movl CPUVAR(ISOURCES)(,%eax,4),%eax
jmp *IS_RESUME(%eax)
7: movl %ebx, CPUVAR(ILEVEL) #restore cpl
jmp .Lalltraps_checkusr
4:
#ifndef DIAGNOSTIC
INTRFASTEXIT
@ -644,6 +663,7 @@ IDTVEC(trap0e)
call _C_LABEL(trap)
addl $4,%esp
addl $4,%esp
.Ltrap0e_checkusr:
testb $CHK_UPL,TF_CS(%esp)
jnz trap0e_checkast
#ifdef VM86
@ -670,8 +690,23 @@ trap0e_checkast:
6: STIC(%eax)
jz 4f
call _C_LABEL(stipending)
#testl %eax,%eax /* XXXcl */
#jnz 1b
testl %eax,%eax
jz 4f
/* process pending interrupts */
CLI(%eax)
movl CPUVAR(ILEVEL), %ebx
movl $.Ltrap0e_resume, %esi # address to resume loop at
.Ltrap0e_resume:
movl %ebx,%eax # get cpl
movl CPUVAR(IUNMASK)(,%eax,4),%eax
andl CPUVAR(IPENDING),%eax # any non-masked bits left?
jz 7f
bsrl %eax,%eax
btrl %eax,CPUVAR(IPENDING)
movl CPUVAR(ISOURCES)(,%eax,4),%eax
jmp *IS_RESUME(%eax)
7: movl %ebx, CPUVAR(ILEVEL) #restore cpl
jmp .Ltrap0e_checkusr
4:
#ifndef DIAGNOSTIC
INTRFASTEXIT