Module Name:    src
Committed By:   bouyer
Date:           Sun Jun 21 16:57:18 UTC 2020

Modified Files:
        src/sys/arch/amd64/amd64: amd64_trap.S locore.S

Log Message:
On amd64, Xen PV delivers syscalls and traps with events enabled.
Disable events on entry to be safe.
Running with events enabled should have been mostly safe, but for FPU
traps we need to reload the FPU state if we were interrupted at trap
entry.
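
For context, a Xen PV guest cannot simply execute the privileged cli
instruction; "disabling events" means setting the per-vCPU upcall mask
in the shared-info page, which is what the CLI() assembler macro used
in this change does.  A minimal C sketch of the idea (field names
follow Xen's public headers; xen_pv_cli() is a made-up helper standing
in for the macro, not NetBSD code):

	#include <stdint.h>

	/* Subset of the Xen per-vCPU info block (remaining fields omitted). */
	struct vcpu_info_sketch {
		uint8_t evtchn_upcall_pending;	/* an event wants delivery */
		uint8_t evtchn_upcall_mask;	/* nonzero: delivery masked */
	};

	/* Stand-in for the CLI() macro: mask event delivery ("cli"). */
	static inline void
	xen_pv_cli(volatile struct vcpu_info_sketch *vci)
	{
		vci->evtchn_upcall_mask = 1;
		__asm volatile("" ::: "memory");	/* keep the store ordered */
	}

Because CLI() takes a scratch register (presumably to address the
vcpu_info), call sites that have no free register yet, such as the
syscall entries in locore.S below, save and restore one around it,
hence the pushq %rsi / CLI(si) / popq %rsi sequence.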

Hopefully fixes:
panic: kernel diagnostic assertion "curlwp->l_md.md_flags & MDL_FPU_IN_CPU" 
failed: file "/home/source/ab/HEAD/src/sys/arch/x86/x86/fpu.c", line 524

when running tests.
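
The assertion above fires when MDL_FPU_IN_CPU has been cleared behind
the trap handler's back: an interrupt taken between the user->kernel
transition and the (previously missing) event-masking point can end up
saving the lwp's FPU state.  A rough C sketch of the check the FPU trap
entry now performs via HANDLE_DEFERRED_FPU (sketch only, based on the
log message; fpu_reload_lwp() is a hypothetical placeholder for the
actual reload routine):

	/* Sketch of the deferred-FPU check; not the real macro. */
	static void
	handle_deferred_fpu_sketch(struct lwp *l)
	{
		if ((l->l_md.md_flags & MDL_FPU_IN_CPU) == 0) {
			/*
			 * An interrupt saved this lwp's FPU state before
			 * events were masked; bring it back into the CPU
			 * so the FPU trap handler sees live state.
			 */
			fpu_reload_lwp(l);	/* hypothetical helper */
			l->l_md.md_flags |= MDL_FPU_IN_CPU;
		}
	}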


To generate a diff of this commit:
cvs rdiff -u -r1.51 -r1.52 src/sys/arch/amd64/amd64/amd64_trap.S
cvs rdiff -u -r1.209 -r1.210 src/sys/arch/amd64/amd64/locore.S

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/sys/arch/amd64/amd64/amd64_trap.S
diff -u src/sys/arch/amd64/amd64/amd64_trap.S:1.51 src/sys/arch/amd64/amd64/amd64_trap.S:1.52
--- src/sys/arch/amd64/amd64/amd64_trap.S:1.51	Sat Dec  7 10:19:35 2019
+++ src/sys/arch/amd64/amd64/amd64_trap.S	Sun Jun 21 16:57:18 2020
@@ -1,4 +1,4 @@
-/*	$NetBSD: amd64_trap.S,v 1.51 2019/12/07 10:19:35 maxv Exp $	*/
+/*	$NetBSD: amd64_trap.S,v 1.52 2020/06/21 16:57:18 bouyer Exp $	*/
 
 /*
  * Copyright (c) 1998, 2007, 2008, 2017 The NetBSD Foundation, Inc.
@@ -90,7 +90,7 @@
  */
 
 #ifdef	XENPV
-#define	PRE_TRAP	movq (%rsp),%rcx ; movq 8(%rsp),%r11 ; addq $0x10,%rsp
+#define	PRE_TRAP	CLI(cx); movq (%rsp),%rcx ; movq 8(%rsp),%r11 ; addq $0x10,%rsp
 #else
 #define	PRE_TRAP
 #endif
@@ -231,9 +231,9 @@ IDTVEC(trap01)
 	movw	%ds,TF_DS(%rsp)
 
 	jmp	.Lalltraps_noentry
-#else
+#else /* !XENPV */
 	ZTRAP(T_TRCTRAP)
-#endif
+#endif /* !XENPV */
 IDTVEC_END(trap01)
 
 /*
@@ -250,7 +250,7 @@ IDTVEC_END(trap01)
 IDTVEC(trap02)
 #if defined(XENPV)
 	ZTRAP(T_NMI)
-#else
+#else /* XENPV */
 	ZTRAP_NJ(T_NMI)
 	subq	$TF_REGSIZE,%rsp
 	INTR_SAVE_GPRS
@@ -299,7 +299,7 @@ IDTVEC(trap02)
 	INTR_RESTORE_GPRS
 	addq	$TF_REGSIZE+16,%rsp
 	iretq
-#endif
+#endif /* XENPV */
 IDTVEC_END(trap02)
 
 IDTVEC(trap03)
@@ -361,7 +361,7 @@ IDTVEC_END(trap07)
 IDTVEC(trap08)
 #if defined(XENPV)
 	TRAP(T_DOUBLEFLT)
-#else
+#else /* XENPV */
 	TRAP_NJ(T_DOUBLEFLT)
 	subq	$TF_REGSIZE,%rsp
 	INTR_SAVE_GPRS
@@ -396,7 +396,7 @@ IDTVEC(trap08)
 	INTR_RESTORE_GPRS
 	addq	$TF_REGSIZE+16,%rsp
 	iretq
-#endif
+#endif /* XENPV */
 IDTVEC_END(trap08)
 
 IDTVEC(trap09)
@@ -414,7 +414,7 @@ IDTVEC_END(trap10)
  * in order to copy the user segment registers into the fault frame.
  */
 #define kernuser_reenter alltraps
-#endif
+#endif /* XENPV */
 
 IDTVEC(trap11)		/* #NP() Segment not present */
 	TRAP_NJ(T_SEGNPFLT)
@@ -448,6 +448,14 @@ IDTVEC(trap16)
 	ZTRAP_NJ(T_ARITHTRAP)
 .Ldo_fputrap:
 	INTRENTRY
+#ifdef XENPV
+	/* Traps are called with interrupts enabled, and we may have been
+	 * interrupted just before the CLI in the trap macro.
+	 * We have to check whether an FPU reload is needed.
+	 */
+	movq    CPUVAR(CURLWP),%r14
+	HANDLE_DEFERRED_FPU
+#endif /* XENPV */
 #ifdef DIAGNOSTIC
 	movl	CPUVAR(ILEVEL),%ebx
 #endif

Index: src/sys/arch/amd64/amd64/locore.S
diff -u src/sys/arch/amd64/amd64/locore.S:1.209 src/sys/arch/amd64/amd64/locore.S:1.210
--- src/sys/arch/amd64/amd64/locore.S:1.209	Wed May 27 19:33:40 2020
+++ src/sys/arch/amd64/amd64/locore.S	Sun Jun 21 16:57:18 2020
@@ -1,4 +1,4 @@
-/*	$NetBSD: locore.S,v 1.209 2020/05/27 19:33:40 ad Exp $	*/
+/*	$NetBSD: locore.S,v 1.210 2020/06/21 16:57:18 bouyer Exp $	*/
 
 /*
  * Copyright-o-rama!
@@ -1481,7 +1481,13 @@ IDTVEC(\name)
 	movq	$2,TF_ERR(%rsp)		/* syscall instruction size */
 	movq	$T_ASTFLT,TF_TRAPNO(%rsp)
 #else
-	/* Xen already switched to kernel stack */
+	/*
+	 * Xen already switched to the kernel stack,
+	 * but it did not disable events.
+	 */
+	pushq	%rsi
+	CLI(si)
+	popq	%rsi
 	addq	$0x10,%rsp	/* gap to match cs:rip */
 	pushq	$2		/* error code */
 	pushq	$T_ASTFLT
@@ -1524,6 +1530,9 @@ IDTVEC_END(syscall32)
 	TEXT_USER_BEGIN
 IDTVEC(osyscall)
 #ifdef XENPV
+	pushq	%rsi
+	CLI(si)
+	popq	%rsi
 	movq (%rsp),%rcx
 	movq 8(%rsp),%r11
 	addq $0x10,%rsp
