Change the xen CLI() and STI() defines to only use one scratch register.

As well as saving an instruction, in one place it saves a push/pop pair.
This commit is contained in:
dsl 2007-12-21 19:18:13 +00:00
parent 6f23ff186c
commit 9b0b44bbdc
5 changed files with 46 additions and 50 deletions

View File

@ -1,4 +1,4 @@
/* $NetBSD: lock_stubs.S,v 1.11 2007/12/20 23:46:10 ad Exp $ */
/* $NetBSD: lock_stubs.S,v 1.12 2007/12/21 19:18:13 dsl Exp $ */
/*-
* Copyright (c) 2006, 2007 The NetBSD Foundation, Inc.
@ -136,11 +136,11 @@ NENTRY(mutex_spin_exit)
cmpl CPU_INFO_ILEVEL(%r8), %edi
jae 1f
movl CPU_INFO_IUNMASK(%r8,%rdi,4), %esi
CLI(ax,10)
CLI(ax)
testl CPU_INFO_IPENDING(%r8), %esi
jnz _C_LABEL(Xspllower)
movl %edi, CPU_INFO_ILEVEL(%r8)
STI(ax,10)
STI(ax)
1: rep /* double byte ret as branch */
ret /* target: see AMD docs */

View File

@ -1,4 +1,4 @@
/* $NetBSD: locore.S,v 1.35 2007/12/03 20:21:32 ad Exp $ */
/* $NetBSD: locore.S,v 1.36 2007/12/21 19:18:14 dsl Exp $ */
/*
* Copyright-o-rama!
@ -1030,9 +1030,7 @@ IDTVEC(syscall)
#else
/* Xen already switched to kernel stack */
pushq %rsi
pushq %rdi
STI(si,di)
popq %rdi
STI(si)
popq %rsi
addq $0x10,%rsp
pushq $2 /* error code */
@ -1053,19 +1051,19 @@ IDTVEC(syscall)
call *P_MD_SYSCALL(%r15)
.Lsyscall_checkast:
/* Check for ASTs on exit to user mode. */
CLI(si,di)
CLI(si)
CHECK_ASTPENDING(%r14)
je 1f
/* Always returning to user mode here. */
CLEAR_ASTPENDING(%r14)
STI(si,di)
STI(si)
/* Pushed T_ASTFLT into tf_trapno on entry. */
movq %rsp,%rdi
call _C_LABEL(trap)
jmp .Lsyscall_checkast /* re-check ASTs */
1: CHECK_DEFERRED_SWITCH
jnz 9f
STI(si,di)
STI(si)
testl $MDP_IRET, L_MD_FLAGS(%r14)
jne iret_return;
syscall_return:
@ -1114,7 +1112,7 @@ syscall_return:
4: .asciz "WARNING: SPL NOT LOWERED ON SYSCALL %d %d EXIT %x %x\n"
#endif
9: STI(si,di)
9: STI(si)
call _C_LABEL(do_pmap_load)
jmp .Lsyscall_checkast /* re-check ASTs */
@ -1188,7 +1186,7 @@ IDTVEC(osyscall)
osyscall1:
pushq $T_ASTFLT # trap # for doing ASTs
INTRENTRY
STI(si,di)
STI(si)
movq CPUVAR(CURLWP),%r14
movq %rsp,L_MD_REGS(%r14) # save pointer to frame
movq L_PROC(%r14),%rdx
@ -1197,12 +1195,12 @@ osyscall1:
_C_LABEL(osyscall_return):
.Losyscall_checkast:
/* Check for ASTs on exit to user mode. */
CLI(si,di)
CLI(si)
CHECK_ASTPENDING(%r14)
je 1f
/* Always returning to user mode here. */
CLEAR_ASTPENDING(%r14)
STI(si,di)
STI(si)
/* Pushed T_ASTFLT into tf_trapno on entry. */
movq %rsp,%rdi
call _C_LABEL(trap)
@ -1217,7 +1215,7 @@ iret_return:
jne 3f
INTRFASTEXIT
3:
STI(si,di)
STI(si)
movabsq $4f, %rdi
xorq %rax,%rax
call _C_LABEL(printf)
@ -1226,7 +1224,7 @@ iret_return:
jmp .Losyscall_checkast
4: .asciz "WARNING: SPL NOT LOWERED ON SYSCALL EXIT\n"
#endif /* DIAGNOSTIC */
9: STI(si,di)
9: STI(si)
call _C_LABEL(do_pmap_load)
jmp .Losyscall_checkast /* re-check ASTs */

View File

@ -1,4 +1,4 @@
/* $NetBSD: spl.S,v 1.14 2007/12/04 08:03:46 ad Exp $ */
/* $NetBSD: spl.S,v 1.15 2007/12/21 19:18:14 dsl Exp $ */
/*
* Copyright (c) 2003 Wasabi Systems, Inc.
@ -251,7 +251,7 @@ IDTVEC(spllower)
leaq 1f(%rip),%r13 # address to resume loop at
1: movl %ebx,%eax # get cpl
movl CPUVAR(IUNMASK)(,%rax,4),%eax
CLI(si,di)
CLI(si)
andl CPUVAR(IPENDING),%eax # any non-masked bits left?
jz 2f
bsrl %eax,%eax
@ -260,7 +260,7 @@ IDTVEC(spllower)
jmp *IS_RECURSE(%rax)
2:
movl %ebx,CPUVAR(ILEVEL)
STI(si,di)
STI(si)
popq %r12
popq %r13
popq %rbx
@ -279,7 +279,7 @@ IDTVEC(doreti)
leaq 1f(%rip),%r13
1: movl %ebx,%eax
movl CPUVAR(IUNMASK)(,%rax,4),%eax
CLI(si,di)
CLI(si)
andl CPUVAR(IPENDING),%eax
jz 2f
bsrl %eax,%eax # slow, but not worth optimizing
@ -297,12 +297,12 @@ doreti_checkast:
CHECK_ASTPENDING(%r14)
je 3f
CLEAR_ASTPENDING(%r14)
STI(si,di)
STI(si)
movl $T_ASTFLT,TF_TRAPNO(%rsp) /* XXX undo later.. */
/* Pushed T_ASTFLT into tf_trapno on entry. */
movq %rsp,%rdi
call _C_LABEL(trap)
CLI(si,di)
CLI(si)
jmp doreti_checkast
3:
CHECK_DEFERRED_SWITCH
@ -310,9 +310,9 @@ doreti_checkast:
6:
INTRFASTEXIT
9:
STI(si,di)
STI(si)
call _C_LABEL(do_pmap_load)
CLI(si,di)
CLI(si)
jmp doreti_checkast /* recheck ASTs */
#ifdef XEN

View File

@ -1,4 +1,4 @@
/* $NetBSD: vector.S,v 1.17 2007/12/03 20:21:33 ad Exp $ */
/* $NetBSD: vector.S,v 1.18 2007/12/21 19:18:14 dsl Exp $ */
/*
* Copyright (c) 2001 Wasabi Systems, Inc.
@ -70,6 +70,7 @@
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <machine/asm.h>
#include "opt_ddb.h"
#include "opt_multiprocessor.h"
@ -79,7 +80,6 @@
#include <machine/i8259.h>
#include <machine/i82093reg.h>
#include <machine/i82489reg.h>
#include <machine/asm.h>
#include <machine/frameasm.h>
#include <machine/segments.h>
#include <machine/trap.h>
@ -136,7 +136,7 @@ IDTVEC(trap07)
pushq $0 # dummy error code
pushq $T_DNA
INTRENTRY
STI(si,di)
STI(si)
movq CPUVAR(SELF),%rdi
call _C_LABEL(fpudna)
INTRFASTEXIT
@ -225,7 +225,7 @@ NENTRY(resume_pop_es)
*/
NENTRY(alltraps)
INTRENTRY
STI(si,di)
STI(si)
calltrap:
#ifdef DIAGNOSTIC
movl CPUVAR(ILEVEL),%ebx
@ -238,11 +238,11 @@ calltrap:
.Lalltraps_checkast:
movq CPUVAR(CURLWP),%r14
/* Check for ASTs on exit to user mode. */
CLI(si,di) ;\
CLI(si)
CHECK_ASTPENDING(%r14)
je 3f
CLEAR_ASTPENDING(%r14)
STI(si,di)
STI(si)
movl $T_ASTFLT,TF_TRAPNO(%rsp)
movq %rsp,%rdi
call _C_LABEL(trap)
@ -255,7 +255,7 @@ calltrap:
6: cmpl CPUVAR(ILEVEL),%ebx
jne 3f
INTRFASTEXIT
3: STI(si,di)
3: STI(si)
movabsq $4f,%rdi
movl CPUVAR(ILEVEL),%esi
movl %ebx,%edx
@ -266,7 +266,7 @@ calltrap:
jmp .Lalltraps_checkast
4: .asciz "WARNING: SPL NOT LOWERED ON TRAP EXIT %x %x\n"
#endif /* DIAGNOSTIC */
9: STI(si,di)
9: STI(si)
call _C_LABEL(do_pmap_load)
jmp .Lalltraps_checkast /* re-check ASTs */
@ -871,7 +871,7 @@ IDTVEC(resume_/**/name/**/num) \
1: \
pushq %r13 ;\
movl $num,CPUVAR(ILEVEL) ;\
STI(si,di) ;\
STI(si) ;\
incl CPUVAR(IDEPTH) ;\
movq IS_HANDLERS(%r14),%rbx ;\
6: \
@ -882,10 +882,10 @@ IDTVEC(resume_/**/name/**/num) \
testq %rbx,%rbx ;\
jnz 6b ;\
5: \
CLI(si,di) ;\
CLI(si) ;\
unmask(num) /* unmask it in hardware */ ;\
late_ack(num) ;\
CLI(si,di) ;\
CLI(si) ;\
jmp _C_LABEL(Xdoreti) /* lower spl and do ASTs */ ;\
# The unmask func for Xen events

View File

@ -1,4 +1,4 @@
/* $NetBSD: frameasm.h,v 1.8 2007/11/22 16:16:45 bouyer Exp $ */
/* $NetBSD: frameasm.h,v 1.9 2007/12/21 19:18:15 dsl Exp $ */
#ifndef _AMD64_MACHINE_FRAMEASM_H
#define _AMD64_MACHINE_FRAMEASM_H
@ -137,21 +137,19 @@
#define CLEAR_ASTPENDING(reg) movl $0, L_MD_ASTPENDING(reg)
#ifdef XEN
#define CLI(reg1,reg2) \
movl CPUVAR(CPUID),%e/**/reg1 ; \
shlq $6,%r/**/reg1 ; \
movq _C_LABEL(HYPERVISOR_shared_info),%r/**/reg2 ; \
addq %r/**/reg1,%r/**/reg2 ; \
movb $1,EVTCHN_UPCALL_MASK(%r/**/reg2)
#define STI(reg1,reg2) \
movl CPUVAR(CPUID),%e/**/reg1 ; \
shlq $6,%r/**/reg1 ; \
movq _C_LABEL(HYPERVISOR_shared_info),%r/**/reg2 ; \
addq %r/**/reg1,%r/**/reg2 ; \
movb $0,EVTCHN_UPCALL_MASK(%r/**/reg2)
/*
 * Under Xen a guest cannot execute cli/sti; instead event delivery is
 * controlled through a per-CPU upcall-mask byte in the hypervisor's
 * shared info page.  CLI(temp_reg) sets that byte (disable "interrupts"),
 * STI(temp_reg) clears it, using a single scratch register.  temp_reg is
 * the register name without size prefix, e.g. CLI(ax) clobbers %rax.
 * NOTE(review): the shlq $6 implies a 64-byte stride per CPU slot in the
 * shared page — presumably sizeof the per-vcpu info; confirm against the
 * Xen public headers.
 */
#define CLI(temp_reg) \
movl CPUVAR(CPUID),%e/**/temp_reg ; /* temp = this CPU's id */ \
shlq $6,%r/**/temp_reg ; /* id * 64 = offset of this CPU's slot */ \
addq _C_LABEL(HYPERVISOR_shared_info),%r/**/temp_reg ; /* + shared page base */ \
movb $1,EVTCHN_UPCALL_MASK(%r/**/temp_reg) /* mask upcalls: "cli" */
/* STI(temp_reg): re-enable event delivery; same addressing as CLI above. */
#define STI(temp_reg) \
movl CPUVAR(CPUID),%e/**/temp_reg ; \
shlq $6,%r/**/temp_reg ; \
addq _C_LABEL(HYPERVISOR_shared_info),%r/**/temp_reg ; \
movb $0,EVTCHN_UPCALL_MASK(%r/**/temp_reg) /* unmask upcalls: "sti" */
#else /* XEN */
#define CLI(reg1,reg2) cli
#define STI(reg1,reg2) sti
/* Native hardware: real cli/sti; temp_reg is accepted but unused. */
#define CLI(temp_reg) cli
#define STI(temp_reg) sti
#endif /* XEN */
#endif /* _AMD64_MACHINE_FRAMEASM_H */