This fixes the interrupt-return part of the MSR_VSX restore bug caught
by the tm-unavailable selftest.

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
---
 arch/powerpc/kernel/syscall_64.c | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/arch/powerpc/kernel/syscall_64.c b/arch/powerpc/kernel/syscall_64.c
index 56533a26f3b7..a2995909b83b 100644
--- a/arch/powerpc/kernel/syscall_64.c
+++ b/arch/powerpc/kernel/syscall_64.c
@@ -251,19 +251,21 @@ notrace unsigned long interrupt_exit_user_prepare(struct pt_regs *regs, unsigned
                ti_flags = READ_ONCE(*ti_flagsp);
        }
 
-       if (IS_ENABLED(CONFIG_PPC_BOOK3S)) {
-               unsigned long mathflags = 0;
-
-               if (IS_ENABLED(CONFIG_PPC_FPU))
-                       mathflags |= MSR_FP;
-               if (IS_ENABLED(CONFIG_ALTIVEC))
-                       mathflags |= MSR_VEC;
-
+       if (IS_ENABLED(CONFIG_PPC_BOOK3S) && IS_ENABLED(CONFIG_PPC_FPU)) {
                if (IS_ENABLED(CONFIG_PPC_TRANSACTIONAL_MEM) &&
-                                               (ti_flags & _TIF_RESTORE_TM))
+                               unlikely((ti_flags & _TIF_RESTORE_TM))) {
                        restore_tm_state(regs);
-               else if ((regs->msr & mathflags) != mathflags)
-                       restore_math(regs);
+               } else {
+                       unsigned long mathflags = MSR_FP;
+
+                       if (cpu_has_feature(CPU_FTR_VSX))
+                               mathflags |= MSR_VEC | MSR_VSX;
+                       else if (cpu_has_feature(CPU_FTR_ALTIVEC))
+                               mathflags |= MSR_VEC;
+
+                       if ((regs->msr & mathflags) != mathflags)
+                               restore_math(regs);
+               }
        }
 
        trace_hardirqs_on();
-- 
2.23.0

Reply via email to