提交 fef55b74 编写于 作者: J James Morse 提交者: Yongqiang Liu

arm64: entry: Add vectors that have the bhb mitigation sequences

stable inclusion
from stable-v4.19.236
commit 91429ed04ebe9dbec88f97c6fd136b722bc3f3c5
category: bugfix
bugzilla: 186460, https://gitee.com/src-openeuler/kernel/issues/I53MHA
CVE: CVE-2022-23960

--------------------------------

commit ba268923 upstream.

Some CPUs affected by Spectre-BHB need a sequence of branches, or a
firmware call to be run before any indirect branch. This needs to go
in the vectors. No CPU needs both.

While this can be patched in, it would run on all CPUs as there is a
single set of vectors. If only one part of a big/little combination is
affected, the unaffected CPUs have to run the mitigation too.

Create extra vectors that include the sequence. Subsequent patches will
allow affected CPUs to select this set of vectors. Later patches will
modify the loop count to match what the CPU requires.
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: James Morse <james.morse@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Signed-off-by: Chen Jiahao <chenjiahao16@huawei.com>
Reviewed-by: Hanjun Guo <guohanjun@huawei.com>
Reviewed-by: Liao Chang <liaochang1@huawei.com>
Signed-off-by: Yongqiang Liu <liuyongqiang13@huawei.com>
上级 4a7a38c5
...@@ -703,4 +703,29 @@ USER(\label, ic ivau, \tmp2) // invalidate I line PoU ...@@ -703,4 +703,29 @@ USER(\label, ic ivau, \tmp2) // invalidate I line PoU
.Lyield_out_\@ : .Lyield_out_\@ :
.endm .endm
/*
 * Mitigate Spectre-BHB by overwriting the branch history.
 *
 * Runs a sequence of 32 taken branches so the branch history is filled
 * with harmless entries before the first indirect branch is executed
 * (this macro is placed in the vectors for exactly that reason).
 * NOTE(review): #32 is the initial loop count; later patches in this
 * series modify it to match what each affected CPU requires - confirm
 * the per-CPU tuning there.
 *
 * \tmp: scratch register, clobbered (used as the loop counter).
 */
.macro __mitigate_spectre_bhb_loop tmp
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
	mov	\tmp, #32			// loop counter: branches to execute
.Lspectre_bhb_loop\@:
	b	. + 4				// taken branch to the next instruction
	subs	\tmp, \tmp, #1
	b.ne	.Lspectre_bhb_loop\@		// backwards branch, taken on each iteration
	dsb	nsh				// complete the sequence before...
	isb					// ...any subsequent (indirect) branch
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
.endm
/*
 * Mitigate Spectre-BHB via a firmware call.
 *
 * Issues SMCCC ARM_SMCCC_ARCH_WORKAROUND_3 before the first indirect
 * branch. Saves/restores x0-x3 to the stack around the call, so the
 * stack must be mapped when this macro runs.
 * The conduit instruction is patched at boot by the
 * arm64_update_smccc_conduit alternative callback.
 * NOTE(review): presumably the nop is left in place when no firmware
 * conduit is available - confirm against arm64_update_smccc_conduit.
 */
.macro __mitigate_spectre_bhb_fw
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
	stp	x0, x1, [sp, #-16]!		// preserve x0-x3 across the SMCCC call
	stp	x2, x3, [sp, #-16]!
	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_3 // SMCCC function ID in w0
alternative_cb	arm64_update_smccc_conduit
	nop					// Patched to SMC/HVC #0
alternative_cb_end
	ldp	x2, x3, [sp], #16
	ldp	x0, x1, [sp], #16
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
.endm
#endif /* __ASM_ASSEMBLER_H */ #endif /* __ASM_ASSEMBLER_H */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2022 ARM Ltd.
 */
#ifndef __ASM_VECTORS_H
#define __ASM_VECTORS_H

/*
 * Indexes into the sets of pre-mitigation EL1 vectors. Each entry names
 * a vector set that applies one Spectre-BHB/KPTI mitigation before
 * branching on to the canonical ('full fat') vectors.
 *
 * Note: the order of this enum corresponds to two arrays in entry.S:
 * tramp_vecs and __bp_harden_el1_vectors. By default the canonical
 * 'full fat' vectors are used directly.
 */
enum arm64_bp_harden_el1_vectors {
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
	/*
	 * Perform the BHB loop mitigation, before branching to the canonical
	 * vectors.
	 */
	EL1_VECTOR_BHB_LOOP,

	/*
	 * Make the SMC call for firmware mitigation, before branching to the
	 * canonical vectors.
	 */
	EL1_VECTOR_BHB_FW,
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */

	/*
	 * Remap the kernel before branching to the canonical vectors.
	 */
	EL1_VECTOR_KPTI,
};

#endif /* __ASM_VECTORS_H */
...@@ -1070,13 +1070,26 @@ alternative_else_nop_endif ...@@ -1070,13 +1070,26 @@ alternative_else_nop_endif
sub \dst, \dst, PAGE_SIZE sub \dst, \dst, PAGE_SIZE
.endm .endm
.macro tramp_ventry, vector_start, regsize, kpti
#define BHB_MITIGATION_NONE 0
#define BHB_MITIGATION_LOOP 1
#define BHB_MITIGATION_FW 2
.macro tramp_ventry, vector_start, regsize, kpti, bhb
.align 7 .align 7
1: 1:
.if \regsize == 64 .if \regsize == 64
msr tpidrro_el0, x30 // Restored in kernel_ventry msr tpidrro_el0, x30 // Restored in kernel_ventry
.endif .endif
.if \bhb == BHB_MITIGATION_LOOP
/*
* This sequence must appear before the first indirect branch. i.e. the
* ret out of tramp_ventry. It appears here because x30 is free.
*/
__mitigate_spectre_bhb_loop x30
.endif // \bhb == BHB_MITIGATION_LOOP
.if \kpti == 1 .if \kpti == 1
/* /*
* Defend against branch aliasing attacks by pushing a dummy * Defend against branch aliasing attacks by pushing a dummy
...@@ -1101,6 +1114,15 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003 ...@@ -1101,6 +1114,15 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
ldr x30, =vectors ldr x30, =vectors
.endif // \kpti == 1 .endif // \kpti == 1
.if \bhb == BHB_MITIGATION_FW
/*
* The firmware sequence must appear before the first indirect branch.
* i.e. the ret out of tramp_ventry. But it also needs the stack to be
* mapped to save/restore the registers the SMC clobbers.
*/
__mitigate_spectre_bhb_fw
.endif // \bhb == BHB_MITIGATION_FW
add x30, x30, #(1b - \vector_start + 4) add x30, x30, #(1b - \vector_start + 4)
ret ret
.org 1b + 128 // Did we overflow the ventry slot? .org 1b + 128 // Did we overflow the ventry slot?
...@@ -1108,6 +1130,9 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003 ...@@ -1108,6 +1130,9 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
.macro tramp_exit, regsize = 64 .macro tramp_exit, regsize = 64
adr x30, tramp_vectors adr x30, tramp_vectors
#ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
add x30, x30, SZ_4K
#endif
msr vbar_el1, x30 msr vbar_el1, x30
ldr lr, [sp, #S_LR] ldr lr, [sp, #S_LR]
tramp_unmap_kernel x29 tramp_unmap_kernel x29
...@@ -1118,26 +1143,32 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003 ...@@ -1118,26 +1143,32 @@ alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
eret eret
.endm .endm
.macro generate_tramp_vector, kpti .macro generate_tramp_vector, kpti, bhb
.Lvector_start\@: .Lvector_start\@:
.space 0x400 .space 0x400
.rept 4 .rept 4
tramp_ventry .Lvector_start\@, 64, \kpti tramp_ventry .Lvector_start\@, 64, \kpti, \bhb
.endr .endr
.rept 4 .rept 4
tramp_ventry .Lvector_start\@, 32, \kpti tramp_ventry .Lvector_start\@, 32, \kpti, \bhb
.endr .endr
.endm .endm
#ifdef CONFIG_UNMAP_KERNEL_AT_EL0 #ifdef CONFIG_UNMAP_KERNEL_AT_EL0
/* /*
* Exception vectors trampoline. * Exception vectors trampoline.
* The order must match __bp_harden_el1_vectors and the
* arm64_bp_harden_el1_vectors enum.
*/ */
.pushsection ".entry.tramp.text", "ax" .pushsection ".entry.tramp.text", "ax"
.align 11 .align 11
ENTRY(tramp_vectors) ENTRY(tramp_vectors)
generate_tramp_vector kpti=1 #ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_LOOP
generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_FW
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
generate_tramp_vector kpti=1, bhb=BHB_MITIGATION_NONE
END(tramp_vectors) END(tramp_vectors)
ENTRY(tramp_exit_native) ENTRY(tramp_exit_native)
...@@ -1164,7 +1195,7 @@ __entry_tramp_data_start: ...@@ -1164,7 +1195,7 @@ __entry_tramp_data_start:
* Exception vectors for spectre mitigations on entry from EL1 when * Exception vectors for spectre mitigations on entry from EL1 when
* kpti is not in use. * kpti is not in use.
*/ */
.macro generate_el1_vector .macro generate_el1_vector, bhb
.Lvector_start\@: .Lvector_start\@:
kernel_ventry 1, sync_invalid // Synchronous EL1t kernel_ventry 1, sync_invalid // Synchronous EL1t
kernel_ventry 1, irq_invalid // IRQ EL1t kernel_ventry 1, irq_invalid // IRQ EL1t
...@@ -1177,17 +1208,21 @@ __entry_tramp_data_start: ...@@ -1177,17 +1208,21 @@ __entry_tramp_data_start:
kernel_ventry 1, error // Error EL1h kernel_ventry 1, error // Error EL1h
.rept 4 .rept 4
tramp_ventry .Lvector_start\@, 64, kpti=0 tramp_ventry .Lvector_start\@, 64, 0, \bhb
.endr .endr
.rept 4 .rept 4
tramp_ventry .Lvector_start\@, 32, kpti=0 tramp_ventry .Lvector_start\@, 32, 0, \bhb
.endr .endr
.endm .endm
/* The order must match tramp_vecs and the arm64_bp_harden_el1_vectors enum. */
.pushsection ".entry.text", "ax" .pushsection ".entry.text", "ax"
.align 11 .align 11
ENTRY(__bp_harden_el1_vectors) ENTRY(__bp_harden_el1_vectors)
generate_el1_vector #ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
generate_el1_vector bhb=BHB_MITIGATION_LOOP
generate_el1_vector bhb=BHB_MITIGATION_FW
#endif /* CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY */
END(__bp_harden_el1_vectors) END(__bp_harden_el1_vectors)
.popsection .popsection
......
...@@ -86,6 +86,13 @@ ...@@ -86,6 +86,13 @@
ARM_SMCCC_SMC_32, \ ARM_SMCCC_SMC_32, \
0, 0x7fff) 0, 0x7fff)
#define ARM_SMCCC_ARCH_WORKAROUND_3 \
ARM_SMCCC_CALL_VAL(ARM_SMCCC_FAST_CALL, \
ARM_SMCCC_SMC_32, \
0, 0x3fff)
#define SMCCC_ARCH_WORKAROUND_RET_UNAFFECTED 1
#ifndef __ASSEMBLY__ #ifndef __ASSEMBLY__
#include <linux/linkage.h> #include <linux/linkage.h>
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册