Commit 295cf156 authored by Robin Murphy's avatar Robin Murphy Committed by Will Deacon
Browse files

arm64: Avoid premature usercopy failure



Al reminds us that the usercopy API must only return complete failure
if absolutely nothing could be copied. Currently, if userspace does
something silly like giving us an unaligned pointer to Device memory,
or a size which overruns MTE tag bounds, we may fail to honour that
requirement when faulting on a multi-byte access even though a smaller
access could have succeeded.

Add a mitigation to the fixup routines to fall back to a single-byte
copy if we faulted on a larger access before anything has been written
to the destination, to guarantee making *some* forward progress. We
needn't be too concerned about the overall performance since this should
only occur when callers are doing something a bit dodgy in the first
place. Particularly broken userspace might still be able to trick
generic_perform_write() into an infinite loop by targeting write() at
an mmap() of some read-only device register where the fault-in load
succeeds but any store synchronously aborts such that copy_to_user() is
genuinely unable to make progress, but, well, don't do that...

CC: stable@vger.kernel.org
Reported-by: Chen Huang <chenhuang5@huawei.com>
Suggested-by: Al Viro <viro@zeniv.linux.org.uk>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Robin Murphy <robin.murphy@arm.com>
Link: https://lore.kernel.org/r/dc03d5c675731a1f24a62417dba5429ad744234e.1626098433.git.robin.murphy@arm.com


Signed-off-by: Will Deacon <will@kernel.org>
parent 8cdd23c2
Loading
Loading
Loading
Loading
+10 −3
Original line number Diff line number Diff line
@@ -29,7 +29,7 @@
	.endm

	.macro ldrh1 reg, ptr, val
	user_ldst 9998f, ldtrh, \reg, \ptr, \val
	user_ldst 9997f, ldtrh, \reg, \ptr, \val
	.endm

	.macro strh1 reg, ptr, val
@@ -37,7 +37,7 @@
	.endm

	.macro ldr1 reg, ptr, val
	user_ldst 9998f, ldtr, \reg, \ptr, \val
	user_ldst 9997f, ldtr, \reg, \ptr, \val
	.endm

	.macro str1 reg, ptr, val
@@ -45,7 +45,7 @@
	.endm

	.macro ldp1 reg1, reg2, ptr, val
	user_ldp 9998f, \reg1, \reg2, \ptr, \val
	user_ldp 9997f, \reg1, \reg2, \ptr, \val
	.endm

	.macro stp1 reg1, reg2, ptr, val
@@ -53,8 +53,10 @@
	.endm

end	.req	x5
srcin	.req	x15
SYM_FUNC_START(__arch_copy_from_user)
	add	end, x0, x2
	mov	srcin, x1
#include "copy_template.S"
	mov	x0, #0				// Nothing to copy
	ret
@@ -63,6 +65,11 @@ EXPORT_SYMBOL(__arch_copy_from_user)

	.section .fixup,"ax"
	.align	2
9997:	cmp	dst, dstin
	b.ne	9998f
	// Before being absolutely sure we couldn't copy anything, try harder
USER(9998f, ldtrb tmp1w, [srcin])
	strb	tmp1w, [dst], #1
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous
+14 −7
Original line number Diff line number Diff line
@@ -30,33 +30,34 @@
	.endm

	.macro ldrh1 reg, ptr, val
	user_ldst 9998f, ldtrh, \reg, \ptr, \val
	user_ldst 9997f, ldtrh, \reg, \ptr, \val
	.endm

	.macro strh1 reg, ptr, val
	user_ldst 9998f, sttrh, \reg, \ptr, \val
	user_ldst 9997f, sttrh, \reg, \ptr, \val
	.endm

	.macro ldr1 reg, ptr, val
	user_ldst 9998f, ldtr, \reg, \ptr, \val
	user_ldst 9997f, ldtr, \reg, \ptr, \val
	.endm

	.macro str1 reg, ptr, val
	user_ldst 9998f, sttr, \reg, \ptr, \val
	user_ldst 9997f, sttr, \reg, \ptr, \val
	.endm

	.macro ldp1 reg1, reg2, ptr, val
	user_ldp 9998f, \reg1, \reg2, \ptr, \val
	user_ldp 9997f, \reg1, \reg2, \ptr, \val
	.endm

	.macro stp1 reg1, reg2, ptr, val
	user_stp 9998f, \reg1, \reg2, \ptr, \val
	user_stp 9997f, \reg1, \reg2, \ptr, \val
	.endm

end	.req	x5

srcin	.req	x15
SYM_FUNC_START(__arch_copy_in_user)
	add	end, x0, x2
	mov	srcin, x1
#include "copy_template.S"
	mov	x0, #0
	ret
@@ -65,6 +66,12 @@ EXPORT_SYMBOL(__arch_copy_in_user)

	.section .fixup,"ax"
	.align	2
9997:	cmp	dst, dstin
	b.ne	9998f
	// Before being absolutely sure we couldn't copy anything, try harder
USER(9998f, ldtrb tmp1w, [srcin])
USER(9998f, sttrb tmp1w, [dst])
	add	dst, dst, #1
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous
+11 −3
Original line number Diff line number Diff line
@@ -32,7 +32,7 @@
	.endm

	.macro strh1 reg, ptr, val
	user_ldst 9998f, sttrh, \reg, \ptr, \val
	user_ldst 9997f, sttrh, \reg, \ptr, \val
	.endm

	.macro ldr1 reg, ptr, val
@@ -40,7 +40,7 @@
	.endm

	.macro str1 reg, ptr, val
	user_ldst 9998f, sttr, \reg, \ptr, \val
	user_ldst 9997f, sttr, \reg, \ptr, \val
	.endm

	.macro ldp1 reg1, reg2, ptr, val
@@ -48,12 +48,14 @@
	.endm

	.macro stp1 reg1, reg2, ptr, val
	user_stp 9998f, \reg1, \reg2, \ptr, \val
	user_stp 9997f, \reg1, \reg2, \ptr, \val
	.endm

end	.req	x5
srcin	.req	x15
SYM_FUNC_START(__arch_copy_to_user)
	add	end, x0, x2
	mov	srcin, x1
#include "copy_template.S"
	mov	x0, #0
	ret
@@ -62,6 +64,12 @@ EXPORT_SYMBOL(__arch_copy_to_user)

	.section .fixup,"ax"
	.align	2
9997:	cmp	dst, dstin
	b.ne	9998f
	// Before being absolutely sure we couldn't copy anything, try harder
	ldrb	tmp1w, [srcin]
USER(9998f, sttrb tmp1w, [dst])
	add	dst, dst, #1
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous