Commit acba44d2 authored by Peter Zijlstra
Browse files

x86/copy_user_64: Remove .fixup usage



Place the anonymous .fixup code at the tail of the regular functions.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.068505810@infradead.org
parent c6dbd3e5
Loading
Loading
Loading
Loading
+11 −21
Original line number Diff line number Diff line
@@ -32,13 +32,9 @@
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail
	.previous

	_ASM_EXTABLE_CPY(100b, 103b)
	_ASM_EXTABLE_CPY(101b, 103b)
	_ASM_EXTABLE_CPY(100b, .Lcopy_user_handle_align)
	_ASM_EXTABLE_CPY(101b, .Lcopy_user_handle_align)
.endm

/*
@@ -107,7 +103,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
	ASM_CLAC
	RET

	.section .fixup,"ax"
30:	shll $6,%ecx
	addl %ecx,%edx
	jmp 60f
@@ -115,7 +110,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
	jmp 60f
50:	movl %ecx,%edx
60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
	.previous

	_ASM_EXTABLE_CPY(1b, 30b)
	_ASM_EXTABLE_CPY(2b, 30b)
@@ -166,20 +160,16 @@ SYM_FUNC_START(copy_user_generic_string)
	movl %edx,%ecx
	shrl $3,%ecx
	andl $7,%edx
1:	rep
	movsq
1:	rep movsq
2:	movl %edx,%ecx
3:	rep
	movsb
3:	rep movsb
	xorl %eax,%eax
	ASM_CLAC
	RET

	.section .fixup,"ax"
11:	leal (%rdx,%rcx,8),%ecx
12:	movl %ecx,%edx		/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail
	.previous

	_ASM_EXTABLE_CPY(1b, 11b)
	_ASM_EXTABLE_CPY(3b, 12b)
@@ -203,16 +193,13 @@ SYM_FUNC_START(copy_user_enhanced_fast_string)
	cmpl $64,%edx
	jb .L_copy_short_string	/* less then 64 bytes, avoid the costly 'rep' */
	movl %edx,%ecx
1:	rep
	movsb
1:	rep movsb
	xorl %eax,%eax
	ASM_CLAC
	RET

	.section .fixup,"ax"
12:	movl %ecx,%edx		/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail
	.previous

	_ASM_EXTABLE_CPY(1b, 12b)
SYM_FUNC_END(copy_user_enhanced_fast_string)
@@ -240,6 +227,11 @@ SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
	RET

	_ASM_EXTABLE_CPY(1b, 2b)

.Lcopy_user_handle_align:
	addl %ecx,%edx			/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail

SYM_CODE_END(.Lcopy_user_handle_tail)

/*
@@ -350,7 +342,6 @@ SYM_FUNC_START(__copy_user_nocache)
	sfence
	RET

	.section .fixup,"ax"
.L_fixup_4x8b_copy:
	shll $6,%ecx
	addl %ecx,%edx
@@ -366,7 +357,6 @@ SYM_FUNC_START(__copy_user_nocache)
.L_fixup_handle_tail:
	sfence
	jmp .Lcopy_user_handle_tail
	.previous

	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)