Commit-ID:  f01703b3d2e6faf7233cedf78f1e2d31b39fa90f
Gitweb:     https://git.kernel.org/tip/f01703b3d2e6faf7233cedf78f1e2d31b39fa90f
Author:     Vincenzo Frascino <vincenzo.frasc...@arm.com>
AuthorDate: Fri, 21 Jun 2019 10:52:41 +0100
Committer:  Thomas Gleixner <t...@linutronix.de>
CommitDate: Sat, 22 Jun 2019 21:21:09 +0200

arm64: compat: Get sigreturn trampolines from vDSO

When the compat vDSO is enabled, the sigreturn trampolines are no longer
provided through the [sigpage] mapping but through [vdso].

Add the relevant code to enable the feature.
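For reference, the trampoline address is resolved with VDSO_SYMBOL(), which
adds the link-time offset emitted into the generated vdso32-offsets.h header
to the base of the [vdso] mapping. A minimal sketch of that lookup, using an
illustrative offset name rather than the exact generated identifier:

    /*
     * Sketch only: resolve a compat sigreturn trampoline inside the
     * [vdso] mapping.  'vdso_offset_compat_sigreturn_arm' stands in for
     * whatever constant the generated vdso32-offsets.h provides, and
     * 'base' is current->mm->context.vdso.
     */
    static void *compat_sigreturn_addr(void *base)
    {
            return (void *)((unsigned long)base +
                            vdso_offset_compat_sigreturn_arm);
    }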

Signed-off-by: Vincenzo Frascino <vincenzo.frasc...@arm.com>
Signed-off-by: Thomas Gleixner <t...@linutronix.de>
Tested-by: Shijith Thotton <sthot...@marvell.com>
Tested-by: Andre Przywara <andre.przyw...@arm.com>
Cc: linux-a...@vger.kernel.org
Cc: linux-arm-ker...@lists.infradead.org
Cc: linux-m...@vger.kernel.org
Cc: linux-kselft...@vger.kernel.org
Cc: Catalin Marinas <catalin.mari...@arm.com>
Cc: Will Deacon <will.dea...@arm.com>
Cc: Arnd Bergmann <a...@arndb.de>
Cc: Russell King <li...@armlinux.org.uk>
Cc: Ralf Baechle <r...@linux-mips.org>
Cc: Paul Burton <paul.bur...@mips.com>
Cc: Daniel Lezcano <daniel.lezc...@linaro.org>
Cc: Mark Salyzyn <saly...@android.com>
Cc: Peter Collingbourne <p...@google.com>
Cc: Shuah Khan <sh...@kernel.org>
Cc: Dmitry Safonov <0x7f454...@gmail.com>
Cc: Rasmus Villemoes <li...@rasmusvillemoes.dk>
Cc: Huw Davies <h...@codeweavers.com>
Link: https://lkml.kernel.org/r/20190621095252.32307-15-vincenzo.frasc...@arm.com

---
 arch/arm64/include/asm/vdso.h |  3 +++
 arch/arm64/kernel/signal32.c  | 26 ++++++++++++++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/arch/arm64/include/asm/vdso.h b/arch/arm64/include/asm/vdso.h
index 1f94ec19903c..9c15e0a06301 100644
--- a/arch/arm64/include/asm/vdso.h
+++ b/arch/arm64/include/asm/vdso.h
@@ -17,6 +17,9 @@
 #ifndef __ASSEMBLY__
 
 #include <generated/vdso-offsets.h>
+#ifdef CONFIG_COMPAT_VDSO
+#include <generated/vdso32-offsets.h>
+#endif
 
 #define VDSO_SYMBOL(base, name)                                           \
 ({                                                                        \
diff --git a/arch/arm64/kernel/signal32.c b/arch/arm64/kernel/signal32.c
index 8a9a5ceb63b7..12a585386c2f 100644
--- a/arch/arm64/kernel/signal32.c
+++ b/arch/arm64/kernel/signal32.c
@@ -18,6 +18,7 @@
 #include <asm/traps.h>
 #include <linux/uaccess.h>
 #include <asm/unistd.h>
+#include <asm/vdso.h>
 
 struct compat_vfp_sigframe {
        compat_ulong_t  magic;
@@ -341,6 +342,30 @@ static void compat_setup_return(struct pt_regs *regs, struct k_sigaction *ka,
                retcode = ptr_to_compat(ka->sa.sa_restorer);
        } else {
                /* Set up sigreturn pointer */
+#ifdef CONFIG_COMPAT_VDSO
+               void *vdso_base = current->mm->context.vdso;
+               void *vdso_trampoline;
+
+               if (ka->sa.sa_flags & SA_SIGINFO) {
+                       if (thumb) {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_rt_sigreturn_thumb);
+                       } else {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_rt_sigreturn_arm);
+                       }
+               } else {
+                       if (thumb) {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_sigreturn_thumb);
+                       } else {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_sigreturn_arm);
+                       }
+               }
+
+               retcode = ptr_to_compat(vdso_trampoline) + thumb;
+#else
                unsigned int idx = thumb << 1;
 
                if (ka->sa.sa_flags & SA_SIGINFO)
@@ -348,6 +373,7 @@ static void compat_setup_return(struct pt_regs *regs, struct k_sigaction *ka,
 
                retcode = (unsigned long)current->mm->context.vdso +
                          (idx << 2) + thumb;
+#endif
        }
 
        regs->regs[0]   = usig;
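A note on the return-code computation above: as in the existing sigpage path,
the low bit of the trampoline address is set when the handler runs in Thumb
state, since on AArch32 bit 0 of a branch target selects the instruction set.
A minimal sketch of that interworking detail, with an illustrative helper name:

    /*
     * Sketch only: form the sigreturn return address.  Setting bit 0
     * makes the CPU enter the trampoline in Thumb state, mirroring the
     * '+ thumb' in both the sigpage and vDSO paths above.
     */
    static unsigned long sigreturn_retcode(unsigned long trampoline, int thumb)
    {
            return trampoline + (thumb ? 1 : 0);
    }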
