Commit 672999cf authored by Youling Tang, committed by Huacai Chen

LoongArch: extable: Add a dedicated uaccess handler

Inspired by commit 2e77a62c ("arm64: extable: add a dedicated uaccess
handler"), do the same for LoongArch: add a dedicated uaccess exception
handler that updates registers in exception context and then returns
into the function which faulted. This removes the need for fixups
specialized to each faulting instruction.

Add gpr-num.h here because we need to map GPR names to integer
constants, so that we can use them to build the meta-data for the
exception fixups.

The compiler treats GPR 0 as "zero" rather than "$r0", so set it
separately as .L__gpr_num_zero, otherwise the following assembler
errors occur:

{standard input}: Assembler messages:
{standard input}:1074: Error: invalid operands (*UND* and *ABS* sections) for `<<'
{standard input}:1160: Error: invalid operands (*UND* and *ABS* sections) for `<<'
make[1]: *** [scripts/Makefile.build:249: fs/fcntl.o] Error 1
Signed-off-by: Youling Tang <tangyouling@loongson.cn>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
Parent 26bc8244
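Before the diff, a minimal standalone sketch (not part of the patch) of how the extable data word packs the two register numbers and how the handler later decodes them. The shift values mirror the EX_DATA_REG_* definitions added in asm-extable.h below; the choice of $r4/$r5 for the operands is hypothetical, picked only for illustration.

#include <stdio.h>

/* Shift values mirror the EX_DATA_REG_* definitions added in the patch. */
#define EX_DATA_REG_ERR_SHIFT	0	/* bits [4:0]: GPR that receives -EFAULT */
#define EX_DATA_REG_ZERO_SHIFT	5	/* bits [9:5]: GPR that is zeroed on a fault */

int main(void)
{
	/* Assume the compiler picked $r4 for "err" and $r5 for "zero",
	 * so .L__gpr_num_$r4 == 4 and .L__gpr_num_$r5 == 5. */
	unsigned int data = (4u << EX_DATA_REG_ERR_SHIFT) |
			    (5u << EX_DATA_REG_ZERO_SHIFT);

	/* ex_handler_uaccess_err_zero() undoes this with
	 * FIELD_GET(GENMASK(4, 0), data) and FIELD_GET(GENMASK(9, 5), data),
	 * then writes -EFAULT and 0 into the corresponding pt_regs slots. */
	printf("data = %#x, err gpr = %u, zero gpr = %u\n",
	       data, data & 0x1f, (data >> 5) & 0x1f);
	return 0;
}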
arch/loongarch/include/asm/asm-extable.h
@@ -4,6 +4,7 @@
#define EX_TYPE_NONE 0
#define EX_TYPE_FIXUP 1
+#define EX_TYPE_UACCESS_ERR_ZERO 2
#ifdef __ASSEMBLY__
@@ -24,6 +25,7 @@
#include <linux/bits.h>
#include <linux/stringify.h>
+#include <asm/gpr-num.h>
#define __ASM_EXTABLE_RAW(insn, fixup, type, data) \
".pushsection __ex_table, \"a\"\n" \
@@ -37,6 +39,26 @@
#define _ASM_EXTABLE(insn, fixup) \
__ASM_EXTABLE_RAW(#insn, #fixup, __stringify(EX_TYPE_FIXUP), "0")
+#define EX_DATA_REG_ERR_SHIFT 0
+#define EX_DATA_REG_ERR GENMASK(4, 0)
+#define EX_DATA_REG_ZERO_SHIFT 5
+#define EX_DATA_REG_ZERO GENMASK(9, 5)
+#define EX_DATA_REG(reg, gpr) \
+"((.L__gpr_num_" #gpr ") << " __stringify(EX_DATA_REG_##reg##_SHIFT) ")"
+#define _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero) \
+__DEFINE_ASM_GPR_NUMS \
+__ASM_EXTABLE_RAW(#insn, #fixup, \
+__stringify(EX_TYPE_UACCESS_ERR_ZERO), \
+"(" \
+EX_DATA_REG(ERR, err) " | " \
+EX_DATA_REG(ZERO, zero) \
+")")
+#define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err) \
+_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero)
#endif /* __ASSEMBLY__ */
#endif /* __ASM_ASM_EXTABLE_H */
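As a usage reference, a minimal sketch of how _ASM_EXTABLE_UACCESS_ERR_ZERO ties a potentially faulting load to its fixup. It mirrors the __get_user_asm pattern changed later in this patch; the helper name read_user_word() is hypothetical and not part of the kernel API.

/* Sketch only: on a fault at 1b, the handler writes -EFAULT into the
 * register holding err (%0), zeroes the register holding tmp (%1), and
 * resumes execution at 2b. */
static inline int read_user_word(unsigned int __user *uaddr, unsigned int *val)
{
	int err = 0;
	unsigned int tmp;

	asm volatile(
	"1:	ld.w	%1, %2	\n"
	"2:			\n"
	_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %0, %1)
	: "+r" (err), "=r" (tmp)
	: "m" (*uaddr));

	*val = tmp;
	return err;
}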
arch/loongarch/include/asm/futex.h
@@ -19,16 +19,11 @@
"2: sc.w $t0, %2 \n" \
" beqz $t0, 1b \n" \
"3: \n" \
-" .section .fixup,\"ax\" \n" \
-"4: li.w %0, %6 \n" \
-" b 3b \n" \
-" .previous \n" \
-_ASM_EXTABLE(1b, 4b) \
-_ASM_EXTABLE(2b, 4b) \
+_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0) \
+_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0) \
: "=r" (ret), "=&r" (oldval), \
"=ZC" (*uaddr) \
-: "0" (0), "ZC" (*uaddr), "Jr" (oparg), \
-"i" (-EFAULT) \
+: "0" (0), "ZC" (*uaddr), "Jr" (oparg) \
: "memory", "t0"); \
}
@@ -85,15 +80,10 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newv
" beqz $t0, 1b \n"
"3: \n"
__WEAK_LLSC_MB
-" .section .fixup,\"ax\" \n"
-"4: li.d %0, %6 \n"
-" b 3b \n"
-" .previous \n"
-_ASM_EXTABLE(1b, 4b)
-_ASM_EXTABLE(2b, 4b)
+_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)
+_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)
: "+r" (ret), "=&r" (val), "=ZC" (*uaddr)
-: "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval),
-"i" (-EFAULT)
+: "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval)
: "memory", "t0");
*uval = val;
arch/loongarch/include/asm/gpr-num.h (new file)
+/* SPDX-License-Identifier: GPL-2.0-only */
+#ifndef __ASM_GPR_NUM_H
+#define __ASM_GPR_NUM_H
+#ifdef __ASSEMBLY__
+.equ .L__gpr_num_zero, 0
+.irp num,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
+.equ .L__gpr_num_$r\num, \num
+.endr
+#else /* __ASSEMBLY__ */
+#define __DEFINE_ASM_GPR_NUMS \
+" .equ .L__gpr_num_zero, 0\n" \
+" .irp num,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31\n" \
+" .equ .L__gpr_num_$r\\num, \\num\n" \
+" .endr\n"
+#endif /* __ASSEMBLY__ */
+#endif /* __ASM_GPR_NUM_H */
arch/loongarch/include/asm/uaccess.h
@@ -161,14 +161,9 @@ do { \
__asm__ __volatile__( \
"1: " insn " %1, %2 \n" \
"2: \n" \
-" .section .fixup,\"ax\" \n" \
-"3: li.w %0, %3 \n" \
-" move %1, $zero \n" \
-" b 2b \n" \
-" .previous \n" \
-_ASM_EXTABLE(1b, 3b) \
+_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %0, %1) \
: "+r" (__gu_err), "=r" (__gu_tmp) \
-: "m" (__m(ptr)), "i" (-EFAULT)); \
+: "m" (__m(ptr))); \
\
(val) = (__typeof__(*(ptr))) __gu_tmp; \
}
@@ -191,13 +186,9 @@ do { \
__asm__ __volatile__( \
"1: " insn " %z2, %1 # __put_user_asm\n" \
"2: \n" \
-" .section .fixup,\"ax\" \n" \
-"3: li.w %0, %3 \n" \
-" b 2b \n" \
-" .previous \n" \
-_ASM_EXTABLE(1b, 3b) \
+_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %0) \
: "+r" (__pu_err), "=m" (__m(ptr)) \
-: "Jr" (__pu_val), "i" (-EFAULT)); \
+: "Jr" (__pu_val)); \
}
#define __get_kernel_nofault(dst, src, type, err_label) \
arch/loongarch/mm/extable.c
@@ -2,6 +2,7 @@
/*
* Copyright (C) 2020-2022 Loongson Technology Corporation Limited
*/
+#include <linux/bitfield.h>
#include <linux/extable.h>
#include <linux/uaccess.h>
#include <asm/asm-extable.h>
@@ -13,6 +14,13 @@ get_ex_fixup(const struct exception_table_entry *ex)
return ((unsigned long)&ex->fixup + ex->fixup);
}
+static inline void regs_set_gpr(struct pt_regs *regs,
+unsigned int offset, unsigned long val)
+{
+if (offset && offset <= MAX_REG_OFFSET)
+*(unsigned long *)((unsigned long)regs + offset) = val;
+}
static bool ex_handler_fixup(const struct exception_table_entry *ex,
struct pt_regs *regs)
{
@@ -21,6 +29,18 @@ static bool ex_handler_fixup(const struct exception_table_entry *ex,
return true;
}
+static bool ex_handler_uaccess_err_zero(const struct exception_table_entry *ex,
+struct pt_regs *regs)
+{
+int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
+int reg_zero = FIELD_GET(EX_DATA_REG_ZERO, ex->data);
+regs_set_gpr(regs, reg_err * sizeof(unsigned long), -EFAULT);
+regs_set_gpr(regs, reg_zero * sizeof(unsigned long), 0);
+regs->csr_era = get_ex_fixup(ex);
+return true;
+}
bool fixup_exception(struct pt_regs *regs)
{
@@ -33,6 +53,8 @@ bool fixup_exception(struct pt_regs *regs)
switch (ex->type) {
case EX_TYPE_FIXUP:
return ex_handler_fixup(ex, regs);
+case EX_TYPE_UACCESS_ERR_ZERO:
+return ex_handler_uaccess_err_zero(ex, regs);
}
BUG();