x86/asm/uaccess: Unify the ALIGN_DESTINATION macro
author	Borislav Petkov <bp@suse.de>
	Wed, 13 May 2015 17:42:23 +0000 (19:42 +0200)
committer	Ingo Molnar <mingo@kernel.org>
	Thu, 14 May 2015 05:25:34 +0000 (07:25 +0200)
Pull the ALIGN_DESTINATION macro up into the asm.h header and kill the
duplicate versions in copy_user_64.S and copy_user_nocache_64.S.

Extracted separately, the two macro bodies are identical, as the matching
md5sums show:

 35948b2bd3431aee7149e85cfe4becbc  /tmp/a
 35948b2bd3431aee7149e85cfe4becbc  /tmp/b

Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/1431538944-27724-3-git-send-email-bp@alien8.de
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/x86/include/asm/asm.h
arch/x86/lib/copy_user_64.S
arch/x86/lib/copy_user_nocache_64.S

index 7730c1c5c83aa7aaf6859170f812ad1f410c1a0b..189679aba703537393b87d699a4e11cbfe94e23c 100644 (file)
        _ASM_ALIGN ;                                            \
        _ASM_PTR (entry);                                       \
        .popsection
+
+.macro ALIGN_DESTINATION
+       /* check for bad alignment of destination */
+       movl %edi,%ecx
+       andl $7,%ecx
+       jz 102f                         /* already aligned */
+       subl $8,%ecx
+       negl %ecx
+       subl %ecx,%edx
+100:   movb (%rsi),%al
+101:   movb %al,(%rdi)
+       incq %rsi
+       incq %rdi
+       decl %ecx
+       jnz 100b
+102:
+       .section .fixup,"ax"
+103:   addl %ecx,%edx                  /* ecx is zerorest also */
+       jmp copy_user_handle_tail
+       .previous
+
+       _ASM_EXTABLE(100b,103b)
+       _ASM_EXTABLE(101b,103b)
+       .endm
+
 #else
 # define _ASM_EXTABLE(from,to)                                 \
        " .pushsection \"__ex_table\",\"a\"\n"                  \
index fa997dfaef242fa9abdb28c20658a939caf72697..06ce685c3a5dacd8d99939662cc35dd783f0c36b 100644 (file)
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-       .macro ALIGN_DESTINATION
-       /* check for bad alignment of destination */
-       movl %edi,%ecx
-       andl $7,%ecx
-       jz 102f                         /* already aligned */
-       subl $8,%ecx
-       negl %ecx
-       subl %ecx,%edx
-100:   movb (%rsi),%al
-101:   movb %al,(%rdi)
-       incq %rsi
-       incq %rdi
-       decl %ecx
-       jnz 100b
-102:
-       .section .fixup,"ax"
-103:   addl %ecx,%edx                  /* ecx is zerorest also */
-       jmp copy_user_handle_tail
-       .previous
-
-       _ASM_EXTABLE(100b,103b)
-       _ASM_EXTABLE(101b,103b)
-       .endm
-
 /* Standard copy_to_user with segment limit checking */
 ENTRY(_copy_to_user)
        CFI_STARTPROC
index 42eeb12e0cd9b8e3220b78ece0acdd7ebcbd3dcc..b836a2bace15af28804c9b21cfa0bd21cd9b9fd3 100644 (file)
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-       .macro ALIGN_DESTINATION
-       /* check for bad alignment of destination */
-       movl %edi,%ecx
-       andl $7,%ecx
-       jz 102f                         /* already aligned */
-       subl $8,%ecx
-       negl %ecx
-       subl %ecx,%edx
-100:   movb (%rsi),%al
-101:   movb %al,(%rdi)
-       incq %rsi
-       incq %rdi
-       decl %ecx
-       jnz 100b
-102:
-       .section .fixup,"ax"
-103:   addl %ecx,%edx                  /* ecx is zerorest also */
-       jmp copy_user_handle_tail
-       .previous
-
-       _ASM_EXTABLE(100b,103b)
-       _ASM_EXTABLE(101b,103b)
-       .endm
-
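
The part of the (now shared) macro worth calling out is the fixup path the
two removed copies carried along: if a byte access in the alignment loop
faults, the code at label 103 adds the not-yet-copied head (%ecx) back onto
the remaining length (%edx) before jumping to copy_user_handle_tail, which
takes over the remainder; the "ecx is zerorest also" comment presumably
notes that %ecx doubles as an argument to that helper. A hedged C-level
sketch of that bookkeeping, with a made-up stub standing in for the real
copy_user_handle_tail():

/*
 * Illustrative sketch only (not kernel code): the bookkeeping done by
 * the .fixup code at label 103 when the alignment byte loop faults.
 * handle_tail_stub() is a hypothetical stand-in for
 * copy_user_handle_tail() and simply reports everything as uncopied.
 */
static unsigned int handle_tail_stub(unsigned char *dst,
				     const unsigned char *src,
				     unsigned int remaining)
{
	(void)dst;
	(void)src;
	return remaining;	/* bytes that could not be copied */
}

static unsigned int alignment_fault_fixup_sketch(unsigned char *dst,
						 const unsigned char *src,
						 unsigned int head_left, /* %ecx */
						 unsigned int len)	 /* %edx */
{
	/* 103: addl %ecx,%edx -- fold the unfinished head back into len */
	unsigned int remaining = len + head_left;

	/* jmp copy_user_handle_tail -- hand the whole remainder over */
	return handle_tail_stub(dst, src, remaining);
}
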
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
  * This will force destination/source out of cache for more performance.