Commit 1b1b4ded authored by Robin Murphy, committed by Yang Yingliang

arm64: Avoid premature usercopy failure

mainline inclusion
from mainline-5.14-rc2
commit 295cf156
category: bugfix
bugzilla: 55085
CVE: NA

---------------------------

Al reminds us that the usercopy API must only return complete failure
if absolutely nothing could be copied. Currently, if userspace does
something silly like giving us an unaligned pointer to Device memory,
or a size which overruns MTE tag bounds, we may fail to honour that
requirement when faulting on a multi-byte access even though a smaller
access could have succeeded.
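
This requirement is visible from userspace: a syscall copying across a partially accessible buffer must make partial progress rather than fail outright. A minimal standalone demonstration (illustrative only, not part of the commit) on the copy_from_user side; on a kernel honouring the contract the write() is expected to return 16 rather than -EFAULT:

	/* Illustrative only: write() from a buffer whose tail is unmapped.
	 * At least 16 bytes are readable, so the usercopy contract requires
	 * a short count of 16, not complete failure. */
	#include <stdio.h>
	#include <string.h>
	#include <sys/mman.h>
	#include <unistd.h>

	int main(void)
	{
		long pg = sysconf(_SC_PAGESIZE);

		/* Two pages: the first readable/writable, the second inaccessible. */
		char *buf = mmap(NULL, 2 * pg, PROT_READ | PROT_WRITE,
				 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
		if (buf == MAP_FAILED)
			return 1;
		mprotect(buf + pg, pg, PROT_NONE);
		memset(buf, 'x', pg);

		int fds[2];
		pipe(fds);	/* pipe writes really copy from the user buffer */

		/* Ask for 64 bytes, of which only the first 16 are accessible. */
		ssize_t n = write(fds[1], buf + pg - 16, 64);
		printf("wrote %zd bytes\n", n);	/* expect 16 on a fixed kernel */
		return 0;
	}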

Add a mitigation to the fixup routines to fall back to a single-byte
copy if we faulted on a larger access before anything has been written
to the destination, to guarantee making *some* forward progress. We
needn't be too concerned about the overall performance since this should
only occur when callers are doing something a bit dodgy in the first
place. Particularly broken userspace might still be able to trick
generic_perform_write() into an infinite loop by targeting write() at
an mmap() of some read-only device register where the fault-in load
succeeds but any store synchronously aborts such that copy_to_user() is
genuinely unable to make progress, but, well, don't do that...
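
The shape of that fallback is easier to follow in C than in the fixup assembly below. A rough paraphrase, not kernel source; get_user_byte()/put_user_byte() are invented stand-ins for the single-byte ldtrb/sttrb accesses in the 9997/9998 fixup code:

	/* Paraphrase of the new fixup path. dstin/srcin are the original
	 * destination/source arguments, dst is the destination cursor at
	 * fault time, and end = dstin + count. */
	static unsigned long fixup(char *dstin, char *dst,
				   const char *srcin, char *end)
	{
		/* Faulted before writing anything? Try a 1-byte copy so the
		 * caller sees *some* forward progress. */
		if (dst == dstin) {
			char c;
			if (get_user_byte(&c, srcin) == 0 &&
			    put_user_byte(c, dst) == 0)
				dst++;
		}
		return end - dst;	/* bytes not copied */
	}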

CC: stable@vger.kernel.org
Reported-by: Chen Huang <chenhuang5@huawei.com>
Suggested-by: Al Viro <viro@zeniv.linux.org.uk>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Robin Murphy <robin.murphy@arm.com>
Link: https://lore.kernel.org/r/dc03d5c675731a1f24a62417dba5429ad744234e.1626098433.git.robin.murphy@arm.com
Signed-off-by: Will Deacon <will@kernel.org>

Conflicts:
	arch/arm64/lib/copy_from_user.S
	arch/arm64/lib/copy_in_user.S
	arch/arm64/lib/copy_to_user.S
Signed-off-by: Chen Huang <chenhuang5@huawei.com>
Reviewed-by: Chen Wandun <chenwandun@huawei.com>
Signed-off-by: Yang Yingliang <yangyingliang@huawei.com>
Parent c65d13f3
--- a/arch/arm64/lib/copy_from_user.S
+++ b/arch/arm64/lib/copy_from_user.S
@@ -39,7 +39,7 @@
 	.endm

 	.macro ldrh1 ptr, regB, val
-	uao_user_alternative 9998f, ldrh, ldtrh, \ptr, \regB, \val
+	uao_user_alternative 9997f, ldrh, ldtrh, \ptr, \regB, \val
 	.endm

 	.macro strh1 ptr, regB, val
@@ -47,7 +47,7 @@
 	.endm

 	.macro ldr1 ptr, regB, val
-	uao_user_alternative 9998f, ldr, ldtr, \ptr, \regB, \val
+	uao_user_alternative 9997f, ldr, ldtr, \ptr, \regB, \val
 	.endm

 	.macro str1 ptr, regB, val
@@ -55,7 +55,7 @@
 	.endm

 	.macro ldp1 ptr, regB, regC, val
-	uao_ldp 9998f, \ptr, \regB, \regC, \val
+	uao_ldp 9997f, \ptr, \regB, \regC, \val
 	.endm

 	.macro stp1 ptr, regB, regC, val
@@ -63,9 +63,11 @@
 	.endm

 end	.req	x5
+srcin	.req	x15
 ENTRY(__arch_copy_from_user)
 	uaccess_enable_not_uao x3, x4, x5
 	add	end, x0, x2
+	mov	srcin, x1
 #include "copy_template.S"
 	uaccess_disable_not_uao x3, x4
 	mov	x0, #0				// Nothing to copy
@@ -74,6 +76,11 @@ ENDPROC(__arch_copy_from_user)

 	.section .fixup,"ax"
 	.align	2
+9997:	cmp	dst, dstin
+	b.ne	9998f
+	// Before being absolutely sure we couldn't copy anything, try harder
+USER(9998f, ldtrb tmp1w, [srcin])
+	strb	tmp1w, [dst], #1
 9998:	sub	x0, end, dst			// bytes not copied
 	uaccess_disable_not_uao x3, x4
 	ret
--- a/arch/arm64/lib/copy_in_user.S
+++ b/arch/arm64/lib/copy_in_user.S
@@ -40,34 +40,36 @@
 	.endm

 	.macro ldrh1 ptr, regB, val
-	uao_user_alternative 9998f, ldrh, ldtrh, \ptr, \regB, \val
+	uao_user_alternative 9997f, ldrh, ldtrh, \ptr, \regB, \val
 	.endm

 	.macro strh1 ptr, regB, val
-	uao_user_alternative 9998f, strh, sttrh, \ptr, \regB, \val
+	uao_user_alternative 9997f, strh, sttrh, \ptr, \regB, \val
 	.endm

 	.macro ldr1 ptr, regB, val
-	uao_user_alternative 9998f, ldr, ldtr, \ptr, \regB, \val
+	uao_user_alternative 9997f, ldr, ldtr, \ptr, \regB, \val
 	.endm

 	.macro str1 ptr, regB, val
-	uao_user_alternative 9998f, str, sttr, \ptr, \regB, \val
+	uao_user_alternative 9997f, str, sttr, \ptr, \regB, \val
 	.endm

 	.macro ldp1 ptr, regB, regC, val
-	uao_ldp 9998f, \ptr, \regB, \regC, \val
+	uao_ldp 9997f, \ptr, \regB, \regC, \val
 	.endm

 	.macro stp1 ptr, regB, regC, val
-	uao_stp 9998f, \ptr, \regB, \regC, \val
+	uao_stp 9997f, \ptr, \regB, \regC, \val
 	.endm

 end	.req	x5
+srcin	.req	x15
 ENTRY(__arch_copy_in_user)
 	uaccess_enable_not_uao x3, x4, x5
 	add	end, x0, x2
+	mov	srcin, x1
 #include "copy_template.S"
 	uaccess_disable_not_uao x3, x4
 	mov	x0, #0
@@ -76,6 +78,12 @@ ENDPROC(__arch_copy_in_user)

 	.section .fixup,"ax"
 	.align	2
+9997:	cmp	dst, dstin
+	b.ne	9998f
+	// Before being absolutely sure we couldn't copy anything, try harder
+USER(9998f, ldtrb tmp1w, [srcin])
+USER(9998f, sttrb tmp1w, [dst])
+	add	dst, dst, #1
 9998:	sub	x0, end, dst			// bytes not copied
 	uaccess_disable_not_uao x3, x4
 	ret
--- a/arch/arm64/lib/copy_to_user.S
+++ b/arch/arm64/lib/copy_to_user.S
@@ -42,7 +42,7 @@
 	.endm

 	.macro strh1 ptr, regB, val
-	uao_user_alternative 9998f, strh, sttrh, \ptr, \regB, \val
+	uao_user_alternative 9997f, strh, sttrh, \ptr, \regB, \val
 	.endm

 	.macro ldr1 ptr, regB, val
@@ -50,7 +50,7 @@
 	.endm

 	.macro str1 ptr, regB, val
-	uao_user_alternative 9998f, str, sttr, \ptr, \regB, \val
+	uao_user_alternative 9997f, str, sttr, \ptr, \regB, \val
 	.endm

 	.macro ldp1 ptr, regB, regC, val
@@ -58,13 +58,15 @@
 	.endm

 	.macro stp1 ptr, regB, regC, val
-	uao_stp 9998f, \ptr, \regB, \regC, \val
+	uao_stp 9997f, \ptr, \regB, \regC, \val
 	.endm

 end	.req	x5
+srcin	.req	x15
 ENTRY(__arch_copy_to_user)
 	uaccess_enable_not_uao x3, x4, x5
 	add	end, x0, x2
+	mov	srcin, x1
 #include "copy_template.S"
 	uaccess_disable_not_uao x3, x4
 	mov	x0, #0
@@ -92,6 +94,12 @@ ENDPROC(__arch_copy_to_user_generic_read)

 	.section .fixup,"ax"
 	.align	2
+9997:	cmp	dst, dstin
+	b.ne	9998f
+	// Before being absolutely sure we couldn't copy anything, try harder
+	ldrb	tmp1w, [srcin]
+USER(9998f, sttrb tmp1w, [dst])
+	add	dst, dst, #1
 9998:	sub	x0, end, dst			// bytes not copied
 	uaccess_disable_not_uao x3, x4
 	ret
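
Note the asymmetry between the three fixups: in copy_from_user only the ldtrb from the user source needs a USER() fixup annotation, since the strb to the kernel destination is not expected to fault, and its post-index addressing advances dst only once the byte is actually stored. copy_to_user is the mirror image: a plain ldrb reads the kernel source and only the sttrb store is guarded. copy_in_user must guard both accesses, as source and destination are both user memory. The unprivileged sttrb has no post-index form, hence the separate add of dst in the latter two. In every variant execution then falls through to 9998, where end - dst yields the "bytes not copied" return value, so a successfully salvaged byte is excluded from the count.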