From: Borislav Petkov <[email protected]>

Pull the ALIGN_DESTINATION macro up into asm/asm.h and kill the
duplicate versions in copy_user_64.S and copy_user_nocache_64.S. Both
copies are identical, as their md5sums show:

 35948b2bd3431aee7149e85cfe4becbc  /tmp/a
 35948b2bd3431aee7149e85cfe4becbc  /tmp/b

Signed-off-by: Borislav Petkov <[email protected]>
---
 arch/x86/include/asm/asm.h          | 25 +++++++++++++++++++++++++
 arch/x86/lib/copy_user_64.S         | 24 ------------------------
 arch/x86/lib/copy_user_nocache_64.S | 24 ------------------------
 3 files changed, 25 insertions(+), 48 deletions(-)
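
For readers less fluent in the asm: the count computation at the top of
ALIGN_DESTINATION is equivalent to the C sketch below. This is
illustrative only, not part of the patch, and the helper name is made
up.

	#include <stdint.h>

	/*
	 * Bytes ALIGN_DESTINATION copies one at a time until dst is
	 * 8-byte aligned; mirrors the macro's movl/andl/jz/subl/negl
	 * sequence.
	 */
	unsigned int align_count(const void *dst)
	{
		unsigned int misalign = (uintptr_t)dst & 7;	/* andl $7,%ecx */

		return misalign ? 8 - misalign : 0;	/* subl $8,%ecx; negl %ecx */
	}

The byte loop then copies that many bytes and, on a fault, the fixup at
label 103 adds the uncopied remainder in %ecx back into the residual
count in %edx before jumping to copy_user_handle_tail.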

diff --git a/arch/x86/include/asm/asm.h b/arch/x86/include/asm/asm.h
index 7730c1c5c83a..189679aba703 100644
--- a/arch/x86/include/asm/asm.h
+++ b/arch/x86/include/asm/asm.h
@@ -63,6 +63,31 @@
        _ASM_ALIGN ;                                            \
        _ASM_PTR (entry);                                       \
        .popsection
+
+.macro ALIGN_DESTINATION
+       /* check for bad alignment of destination */
+       movl %edi,%ecx
+       andl $7,%ecx
+       jz 102f                         /* already aligned */
+       subl $8,%ecx
+       negl %ecx
+       subl %ecx,%edx
+100:   movb (%rsi),%al
+101:   movb %al,(%rdi)
+       incq %rsi
+       incq %rdi
+       decl %ecx
+       jnz 100b
+102:
+       .section .fixup,"ax"
+103:   addl %ecx,%edx                  /* ecx is zerorest also */
+       jmp copy_user_handle_tail
+       .previous
+
+       _ASM_EXTABLE(100b,103b)
+       _ASM_EXTABLE(101b,103b)
+       .endm
+
 #else
 # define _ASM_EXTABLE(from,to)                                 \
        " .pushsection \"__ex_table\",\"a\"\n"                  \
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index fa997dfaef24..06ce685c3a5d 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -16,30 +16,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-       .macro ALIGN_DESTINATION
-       /* check for bad alignment of destination */
-       movl %edi,%ecx
-       andl $7,%ecx
-       jz 102f                         /* already aligned */
-       subl $8,%ecx
-       negl %ecx
-       subl %ecx,%edx
-100:   movb (%rsi),%al
-101:   movb %al,(%rdi)
-       incq %rsi
-       incq %rdi
-       decl %ecx
-       jnz 100b
-102:
-       .section .fixup,"ax"
-103:   addl %ecx,%edx                  /* ecx is zerorest also */
-       jmp copy_user_handle_tail
-       .previous
-
-       _ASM_EXTABLE(100b,103b)
-       _ASM_EXTABLE(101b,103b)
-       .endm
-
 /* Standard copy_to_user with segment limit checking */
 ENTRY(_copy_to_user)
        CFI_STARTPROC
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S
index 42eeb12e0cd9..b836a2bace15 100644
--- a/arch/x86/lib/copy_user_nocache_64.S
+++ b/arch/x86/lib/copy_user_nocache_64.S
@@ -14,30 +14,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-       .macro ALIGN_DESTINATION
-       /* check for bad alignment of destination */
-       movl %edi,%ecx
-       andl $7,%ecx
-       jz 102f                         /* already aligned */
-       subl $8,%ecx
-       negl %ecx
-       subl %ecx,%edx
-100:   movb (%rsi),%al
-101:   movb %al,(%rdi)
-       incq %rsi
-       incq %rdi
-       decl %ecx
-       jnz 100b
-102:
-       .section .fixup,"ax"
-103:   addl %ecx,%edx                  /* ecx is zerorest also */
-       jmp copy_user_handle_tail
-       .previous
-
-       _ASM_EXTABLE(100b,103b)
-       _ASM_EXTABLE(101b,103b)
-       .endm
-
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
  * This will force destination/source out of cache for more performance.
-- 
2.3.5
