
Commit 9e6b13f

suryasaimadhu authored and Ingo Molnar committed
x86/asm/uaccess: Unify the ALIGN_DESTINATION macro
Pull it up into the header and kill duplicate versions.

Separately, both macros are identical:

  35948b2bd3431aee7149e85cfe4becbc  /tmp/a
  35948b2bd3431aee7149e85cfe4becbc  /tmp/b

Signed-off-by: Borislav Petkov <[email protected]>
Cc: Andy Lutomirski <[email protected]>
Cc: Borislav Petkov <[email protected]>
Cc: Brian Gerst <[email protected]>
Cc: Denys Vlasenko <[email protected]>
Cc: H. Peter Anvin <[email protected]>
Cc: Linus Torvalds <[email protected]>
Cc: Peter Zijlstra <[email protected]>
Cc: Thomas Gleixner <[email protected]>
Link: http://lkml.kernel.org/r/[email protected]
Signed-off-by: Ingo Molnar <[email protected]>
1 parent 26e7d9d commit 9e6b13f

3 files changed, +25 -48 lines changed

arch/x86/include/asm/asm.h

Lines changed: 25 additions & 0 deletions

@@ -63,6 +63,31 @@
 	_ASM_ALIGN ;				\
 	_ASM_PTR (entry);			\
 	.popsection
+
+	.macro ALIGN_DESTINATION
+	/* check for bad alignment of destination */
+	movl %edi,%ecx
+	andl $7,%ecx
+	jz 102f				/* already aligned */
+	subl $8,%ecx
+	negl %ecx
+	subl %ecx,%edx
+100:	movb (%rsi),%al
+101:	movb %al,(%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz 100b
+102:
+	.section .fixup,"ax"
+103:	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp copy_user_handle_tail
+	.previous
+
+	_ASM_EXTABLE(100b,103b)
+	_ASM_EXTABLE(101b,103b)
+	.endm
+
 #else
 # define _ASM_EXTABLE(from,to)					\
 	" .pushsection \"__ex_table\",\"a\"\n"			\

arch/x86/lib/copy_user_64.S

Lines changed: 0 additions & 24 deletions

@@ -16,30 +16,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-	.macro ALIGN_DESTINATION
-	/* check for bad alignment of destination */
-	movl %edi,%ecx
-	andl $7,%ecx
-	jz 102f				/* already aligned */
-	subl $8,%ecx
-	negl %ecx
-	subl %ecx,%edx
-100:	movb (%rsi),%al
-101:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 100b
-102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(100b,103b)
-	_ASM_EXTABLE(101b,103b)
-	.endm
-
 /* Standard copy_to_user with segment limit checking */
 ENTRY(_copy_to_user)
 	CFI_STARTPROC

arch/x86/lib/copy_user_nocache_64.S

Lines changed: 0 additions & 24 deletions

@@ -14,30 +14,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-	.macro ALIGN_DESTINATION
-	/* check for bad alignment of destination */
-	movl %edi,%ecx
-	andl $7,%ecx
-	jz 102f				/* already aligned */
-	subl $8,%ecx
-	negl %ecx
-	subl %ecx,%edx
-100:	movb (%rsi),%al
-101:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 100b
-102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(100b,103b)
-	_ASM_EXTABLE(101b,103b)
-	.endm
-
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
  * This will force destination/source out of cache for more performance.
