From cfa5644b2b9f5b62453433320ae63ccec5026bd2 Mon Sep 17 00:00:00 2001
From: John Dyson <dyson@FreeBSD.org>
Date: Tue, 12 May 1998 18:37:10 +0000
Subject: [PATCH] Some temporary fixes to SMP to make it more scheduling and
 signal friendly.  This is a result of discussions on the mailing lists.
 Kudos to those who have found the issue and created work-arounds.  I have
 chosen Tor's fix for now, before we can all work the issue more completely.

Submitted by:	Tor Egge
---
 sys/amd64/amd64/cpu_switch.S | 16 +++++++++++++---
 sys/amd64/amd64/swtch.s      | 16 +++++++++++++---
 sys/i386/i386/swtch.s        | 16 +++++++++++++---
 3 files changed, 39 insertions(+), 9 deletions(-)

diff --git a/sys/amd64/amd64/cpu_switch.S b/sys/amd64/amd64/cpu_switch.S
index 7a108dd2cd5..81afd303787 100644
--- a/sys/amd64/amd64/cpu_switch.S
+++ b/sys/amd64/amd64/cpu_switch.S
@@ -33,7 +33,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: swtch.s,v 1.70 1998/03/28 11:49:31 dufault Exp $
+ *	$Id: swtch.s,v 1.71 1998/04/06 15:44:31 peter Exp $
  */
 
 #include "npx.h"
@@ -49,6 +49,8 @@
 #include <machine/pmap.h>
 #include <machine/apic.h>
 #include <machine/smptests.h>		/** GRAB_LOPRIO */
+#include <machine/ipl.h>
+#include <machine/lock.h>
 #endif /* SMP */
 
 #include "assym.s"
@@ -308,6 +310,10 @@ _idle:
 	 *
 	 * XXX: we had damn well better be sure we had it before doing this!
 	 */
+	CPL_LOCK				/* XXX */
+	andl	$~SWI_AST_MASK, _ipending	/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	movl	$FREE_LOCK, %eax
 	movl	%eax, _mp_lock
 
@@ -357,16 +363,20 @@ idle_loop:
 	jmp	idle_loop
 
 3:
-#ifdef SMP
 	movl	$LOPRIO_LEVEL, lapic_tpr	/* arbitrate for INTs */
-#endif
 	call	_get_mplock
+	CPL_LOCK				/* XXX */
+	movl	$SWI_AST_MASK, _cpl	/* XXX Disallow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	cmpl	$0,_whichrtqs			/* real-time queue */
 	CROSSJUMP(jne, sw1a, je)
 	cmpl	$0,_whichqs			/* normal queue */
 	CROSSJUMP(jne, nortqr, je)
 	cmpl	$0,_whichidqs			/* 'idle' queue */
 	CROSSJUMP(jne, idqr, je)
+	CPL_LOCK				/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	call	_rel_mplock
 	jmp	idle_loop

diff --git a/sys/amd64/amd64/swtch.s b/sys/amd64/amd64/swtch.s
index 7a108dd2cd5..81afd303787 100644
--- a/sys/amd64/amd64/swtch.s
+++ b/sys/amd64/amd64/swtch.s
@@ -33,7 +33,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: swtch.s,v 1.70 1998/03/28 11:49:31 dufault Exp $
+ *	$Id: swtch.s,v 1.71 1998/04/06 15:44:31 peter Exp $
  */
 
 #include "npx.h"
@@ -49,6 +49,8 @@
 #include <machine/pmap.h>
 #include <machine/apic.h>
 #include <machine/smptests.h>		/** GRAB_LOPRIO */
+#include <machine/ipl.h>
+#include <machine/lock.h>
 #endif /* SMP */
 
 #include "assym.s"
@@ -308,6 +310,10 @@ _idle:
 	 *
 	 * XXX: we had damn well better be sure we had it before doing this!
 	 */
+	CPL_LOCK				/* XXX */
+	andl	$~SWI_AST_MASK, _ipending	/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	movl	$FREE_LOCK, %eax
 	movl	%eax, _mp_lock
 
@@ -357,16 +363,20 @@ idle_loop:
 	jmp	idle_loop
 
 3:
-#ifdef SMP
 	movl	$LOPRIO_LEVEL, lapic_tpr	/* arbitrate for INTs */
-#endif
 	call	_get_mplock
+	CPL_LOCK				/* XXX */
+	movl	$SWI_AST_MASK, _cpl	/* XXX Disallow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	cmpl	$0,_whichrtqs			/* real-time queue */
 	CROSSJUMP(jne, sw1a, je)
 	cmpl	$0,_whichqs			/* normal queue */
 	CROSSJUMP(jne, nortqr, je)
 	cmpl	$0,_whichidqs			/* 'idle' queue */
 	CROSSJUMP(jne, idqr, je)
+	CPL_LOCK				/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	call	_rel_mplock
 	jmp	idle_loop

diff --git a/sys/i386/i386/swtch.s b/sys/i386/i386/swtch.s
index 7a108dd2cd5..81afd303787 100644
--- a/sys/i386/i386/swtch.s
+++ b/sys/i386/i386/swtch.s
@@ -33,7 +33,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: swtch.s,v 1.70 1998/03/28 11:49:31 dufault Exp $
+ *	$Id: swtch.s,v 1.71 1998/04/06 15:44:31 peter Exp $
  */
 
 #include "npx.h"
@@ -49,6 +49,8 @@
 #include <machine/pmap.h>
 #include <machine/apic.h>
 #include <machine/smptests.h>		/** GRAB_LOPRIO */
+#include <machine/ipl.h>
+#include <machine/lock.h>
 #endif /* SMP */
 
 #include "assym.s"
@@ -308,6 +310,10 @@ _idle:
 	 *
 	 * XXX: we had damn well better be sure we had it before doing this!
 	 */
+	CPL_LOCK				/* XXX */
+	andl	$~SWI_AST_MASK, _ipending	/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	movl	$FREE_LOCK, %eax
 	movl	%eax, _mp_lock
 
@@ -357,16 +363,20 @@ idle_loop:
 	jmp	idle_loop
 
 3:
-#ifdef SMP
 	movl	$LOPRIO_LEVEL, lapic_tpr	/* arbitrate for INTs */
-#endif
 	call	_get_mplock
+	CPL_LOCK				/* XXX */
+	movl	$SWI_AST_MASK, _cpl	/* XXX Disallow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	cmpl	$0,_whichrtqs			/* real-time queue */
 	CROSSJUMP(jne, sw1a, je)
 	cmpl	$0,_whichqs			/* normal queue */
 	CROSSJUMP(jne, nortqr, je)
 	cmpl	$0,_whichidqs			/* 'idle' queue */
 	CROSSJUMP(jne, idqr, je)
+	CPL_LOCK				/* XXX */
+	movl	$0, _cpl		/* XXX Allow ASTs on other CPU */
+	CPL_UNLOCK				/* XXX */
 	call	_rel_mplock
 	jmp	idle_loop