提交 88072baa 编写于 作者: A Ard Biesheuvel 提交者: Zheng Zengkai

ARM: kernel: make vmlinux buildable as a PIE executable

maillist inclusion
commit 04be01192973461cdd00ab47908a78f0e2f55ef8
category: feature
feature: ARM kaslr support
bugzilla: 47952
CVE: NA

Reference: https://git.kernel.org/pub/scm/linux/kernel/git/ardb/linux.git/commit/?h=arm-kaslr-latest&id=04be01192973461cdd00ab47908a78f0e2f55ef8

Make the Kconfig RELOCATABLE option depend on !JUMP_LABEL to resolve
compilation conflicts between -fpic and JUMP_LABEL

-------------------------------------------------

Update the build flags and linker script to allow vmlinux to be built
as a PIE binary, which retains relocation information about absolute
symbol references so that they can be fixed up at runtime. This will
be used for implementing KASLR.

Cc: Russell King <linux@armlinux.org.uk>
Acked-by: Nicolas Pitre <nico@linaro.org>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Cui GaoSheng <cuigaosheng1@huawei.com>
Reviewed-by: Xiu Jianfeng <xiujianfeng@huawei.com>
Signed-off-by: Zheng Zengkai <zhengzengkai@huawei.com>
上级 30080059
...@@ -1678,6 +1678,11 @@ config STACKPROTECTOR_PER_TASK ...@@ -1678,6 +1678,11 @@ config STACKPROTECTOR_PER_TASK
Enable this option to switch to a different method that uses a Enable this option to switch to a different method that uses a
different canary value for each task. different canary value for each task.
# Build vmlinux as a position-independent (PIE) executable so absolute
# symbol references can be fixed up at runtime; groundwork for ARM KASLR
# (see commit log above). Not user-selectable: other options select it.
config RELOCATABLE
bool
# XIP kernels execute in place and cannot be relocated; JUMP_LABEL is
# excluded because -fpic conflicts with jump-label code patching
# (per the commit message above).
depends on !XIP_KERNEL && !JUMP_LABEL
select HAVE_ARCH_PREL32_RELOCATIONS
endmenu endmenu
menu "Boot options" menu "Boot options"
......
...@@ -48,6 +48,11 @@ CHECKFLAGS += -D__ARMEL__ ...@@ -48,6 +48,11 @@ CHECKFLAGS += -D__ARMEL__
KBUILD_LDFLAGS += -EL KBUILD_LDFLAGS += -EL
endif endif
# When CONFIG_RELOCATABLE=y, build vmlinux as a PIE binary that keeps
# relocation records for runtime fixup (basis for KASLR, per commit log).
ifeq ($(CONFIG_RELOCATABLE),y)
# -fpic: emit position-independent code; hidden.h presumably forces
# hidden symbol visibility to avoid GOT indirections -- TODO confirm
KBUILD_CFLAGS += -fpic -include $(srctree)/include/linux/hidden.h
# -pie -shared: link as a position-independent executable retaining
# dynamic relocations; -Bsymbolic binds symbol references locally at
# link time -- NOTE(review): confirm all three are needed for this ld
LDFLAGS_vmlinux += -pie -shared -Bsymbolic
endif
# #
# The Scalar Replacement of Aggregates (SRA) optimization pass in GCC 4.9 and # The Scalar Replacement of Aggregates (SRA) optimization pass in GCC 4.9 and
# later may result in code being generated that handles signed short and signed # later may result in code being generated that handles signed short and signed
......
...@@ -528,7 +528,7 @@ THUMB( orr \reg , \reg , #PSR_T_BIT ) ...@@ -528,7 +528,7 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
* mov_l - move a constant value or [relocated] address into a register * mov_l - move a constant value or [relocated] address into a register
*/ */
.macro mov_l, dst:req, imm:req .macro mov_l, dst:req, imm:req
.if __LINUX_ARM_ARCH__ < 7 .if CONFIG_RELOCATABLE == 1 || __LINUX_ARM_ARCH__ < 7
ldr \dst, =\imm ldr \dst, =\imm
.else .else
movw \dst, #:lower16:\imm movw \dst, #:lower16:\imm
......
...@@ -50,7 +50,11 @@ ...@@ -50,7 +50,11 @@
EXIT_CALL \ EXIT_CALL \
ARM_MMU_DISCARD(*(.text.fixup)) \ ARM_MMU_DISCARD(*(.text.fixup)) \
ARM_MMU_DISCARD(*(__ex_table)) \ ARM_MMU_DISCARD(*(__ex_table)) \
COMMON_DISCARDS COMMON_DISCARDS \
*(.ARM.exidx.discard.text) \
*(.interp .dynamic) \
*(.dynsym .dynstr .hash)
/* /*
* Sections that should stay zero sized, which is safer to explicitly * Sections that should stay zero sized, which is safer to explicitly
......
...@@ -115,6 +115,12 @@ SECTIONS ...@@ -115,6 +115,12 @@ SECTIONS
__smpalt_end = .; __smpalt_end = .;
} }
#endif #endif
/*
 * Collect the dynamic relocation entries produced by the -pie link into
 * one bounded section so the kernel can walk them at boot and fix up
 * absolute symbol references after relocation (used for KASLR).
 */
.rel.dyn : ALIGN(8) {
__rel_begin = .;
*(.rel .rel.* .rel.dyn)
}
/* End marker computed from the section's own address and size rather
 * than '.', so it is correct even if '.' has moved past the section. */
__rel_end = ADDR(.rel.dyn) + SIZEOF(.rel.dyn);
.init.pv_table : { .init.pv_table : {
__pv_table_begin = .; __pv_table_begin = .;
*(.pv_table) *(.pv_table)
......
...@@ -7,6 +7,7 @@ SECTIONS { ...@@ -7,6 +7,7 @@ SECTIONS {
/DISCARD/ : { /DISCARD/ : {
*(.discard) *(.discard)
*(.discard.*) *(.discard.*)
*(*.discard.*)
} }
__ksymtab 0 : { *(SORT(___ksymtab+*)) } __ksymtab 0 : { *(SORT(___ksymtab+*)) }
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册