From 8966b85c8f970d5eaf5e7ef16c057ca047abdc03 Mon Sep 17 00:00:00 2001
From: John Dyson
Date: Tue, 8 Aug 1995 04:50:52 +0000
Subject: [PATCH] Make the spl oriented inline functions less likely to allow
 potentially volatile memory to be kept in registers during the "call"
 (inline expansion.) Do the same for pmap_update.

---
 sys/amd64/include/cpufunc.h | 9 +++++----
 sys/i386/include/cpufunc.h  | 9 +++++----
 sys/i386/include/spl.h      | 3 ++-
 3 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/sys/amd64/include/cpufunc.h b/sys/amd64/include/cpufunc.h
index 117b59eb4739..92a5a0d458a8 100644
--- a/sys/amd64/include/cpufunc.h
+++ b/sys/amd64/include/cpufunc.h
@@ -30,7 +30,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: cpufunc.h,v 1.37 1995/05/30 08:00:30 rgrimes Exp $
+ *	$Id: cpufunc.h,v 1.38 1995/07/25 21:28:47 bde Exp $
  */
 
 /*
@@ -63,13 +63,13 @@ bdb(void)
 static __inline void
 disable_intr(void)
 {
-	__asm __volatile("cli");
+	__asm __volatile("cli" : : : "memory");
 }
 
 static __inline void
 enable_intr(void)
 {
-	__asm __volatile("sti");
+	__asm __volatile("sti" : : : "memory");
 }
 
 #define	HAVE_INLINE_FFS
@@ -268,7 +268,8 @@ pmap_update(void)
 	 * This should be implemented as load_cr3(rcr3()) when load_cr3()
 	 * is inlined.
 	 */
-	__asm __volatile("movl %%cr3, %0; movl %0, %%cr3" : "=r" (temp));
+	__asm __volatile("movl %%cr3, %0; movl %0, %%cr3" : "=r" (temp) :
+	    : "memory");
 }
 
 static __inline u_long
diff --git a/sys/i386/include/cpufunc.h b/sys/i386/include/cpufunc.h
index 117b59eb4739..92a5a0d458a8 100644
--- a/sys/i386/include/cpufunc.h
+++ b/sys/i386/include/cpufunc.h
@@ -30,7 +30,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: cpufunc.h,v 1.37 1995/05/30 08:00:30 rgrimes Exp $
+ *	$Id: cpufunc.h,v 1.38 1995/07/25 21:28:47 bde Exp $
  */
 
 /*
@@ -63,13 +63,13 @@ bdb(void)
 static __inline void
 disable_intr(void)
 {
-	__asm __volatile("cli");
+	__asm __volatile("cli" : : : "memory");
 }
 
 static __inline void
 enable_intr(void)
 {
-	__asm __volatile("sti");
+	__asm __volatile("sti" : : : "memory");
 }
 
 #define	HAVE_INLINE_FFS
@@ -268,7 +268,8 @@ pmap_update(void)
 	 * This should be implemented as load_cr3(rcr3()) when load_cr3()
 	 * is inlined.
 	 */
-	__asm __volatile("movl %%cr3, %0; movl %0, %%cr3" : "=r" (temp));
+	__asm __volatile("movl %%cr3, %0; movl %0, %%cr3" : "=r" (temp) :
+	    : "memory");
 }
 
 static __inline u_long
diff --git a/sys/i386/include/spl.h b/sys/i386/include/spl.h
index 1db479fe5ddc..57aff7c0667b 100644
--- a/sys/i386/include/spl.h
+++ b/sys/i386/include/spl.h
@@ -30,7 +30,7 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- *	$Id: spl.h,v 1.7 1995/05/11 00:13:01 wollman Exp $
+ *	$Id: spl.h,v 1.8 1995/05/11 07:44:16 bde Exp $
  */
 
 #ifndef _MACHINE_IPL_H_
@@ -105,6 +105,7 @@ static __inline int name(void)		\
 {						\
 	unsigned x;				\
 						\
+	asm volatile("":::"memory");		\
 	x = cpl;				\
 	set_cpl;				\
 	return (x);				\
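
Note (not part of the patch): the change hinges on the "memory" clobber, which
tells GCC that the asm may read or write arbitrary memory, so values cached in
registers must be spilled before the asm and reloaded afterwards. The sketch
below is a standalone, user-space illustration of that code-generation effect,
not code from the FreeBSD tree; the names cpl_shadow, no_barrier(), barrier(),
and read_twice_*() are made up for this example. Compile with -O2 and compare
the generated assembly of the two functions.

	/* Stand-in for a variable an interrupt handler might modify. */
	static unsigned cpl_shadow;

	static inline void
	no_barrier(void)
	{
		/* volatile asm, but no clobber: the compiler may keep
		 * cpl_shadow cached in a register across this point. */
		__asm__ __volatile__("");
	}

	static inline void
	barrier(void)
	{
		/* "memory" clobber: a compiler barrier, like the one the
		 * patch adds to disable_intr()/enable_intr(). */
		__asm__ __volatile__("" : : : "memory");
	}

	unsigned
	read_twice_no_barrier(void)
	{
		unsigned a = cpl_shadow;
		no_barrier();
		unsigned b = cpl_shadow;	/* may reuse the earlier load */
		return (a + b);
	}

	unsigned
	read_twice_barrier(void)
	{
		unsigned a = cpl_shadow;
		barrier();
		unsigned b = cpl_shadow;	/* must be reloaded from memory */
		return (a + b);
	}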