提交 49db46a6 编写于 作者: D Denys Vlasenko 提交者: Ingo Molnar

x86/asm: Introduce push/pop macros which generate CFI_REL_OFFSET and CFI_RESTORE

Sequences:

        pushl_cfi %reg
        CFI_REL_OFFSET reg, 0

and:

        popl_cfi %reg
        CFI_RESTORE reg

happen quite often. This patch adds macros which generate them.

No assembly changes (verified with objdump -dr vmlinux.o).
Signed-off-by: Denys Vlasenko <dvlasenk@redhat.com>
Signed-off-by: Andy Lutomirski <luto@amacapital.net>
Cc: Alexei Starovoitov <ast@plumgrid.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Kees Cook <keescook@chromium.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Oleg Nesterov <oleg@redhat.com>
Cc: Will Drewry <wad@chromium.org>
Link: http://lkml.kernel.org/r/1421017655-25561-1-git-send-email-dvlasenk@redhat.com
Link: http://lkml.kernel.org/r/2202eb90f175cf45d1b2d1c64dbb5676a8ad07ad.1424989793.git.luto@amacapital.net
Signed-off-by: Ingo Molnar <mingo@kernel.org>
上级 69e8544c
...@@ -210,37 +210,23 @@ For 32-bit we have the following conventions - kernel is built with ...@@ -210,37 +210,23 @@ For 32-bit we have the following conventions - kernel is built with
*/ */
.macro SAVE_ALL .macro SAVE_ALL
pushl_cfi %eax pushl_cfi_reg eax
CFI_REL_OFFSET eax, 0 pushl_cfi_reg ebp
pushl_cfi %ebp pushl_cfi_reg edi
CFI_REL_OFFSET ebp, 0 pushl_cfi_reg esi
pushl_cfi %edi pushl_cfi_reg edx
CFI_REL_OFFSET edi, 0 pushl_cfi_reg ecx
pushl_cfi %esi pushl_cfi_reg ebx
CFI_REL_OFFSET esi, 0
pushl_cfi %edx
CFI_REL_OFFSET edx, 0
pushl_cfi %ecx
CFI_REL_OFFSET ecx, 0
pushl_cfi %ebx
CFI_REL_OFFSET ebx, 0
.endm .endm
.macro RESTORE_ALL .macro RESTORE_ALL
popl_cfi %ebx popl_cfi_reg ebx
CFI_RESTORE ebx popl_cfi_reg ecx
popl_cfi %ecx popl_cfi_reg edx
CFI_RESTORE ecx popl_cfi_reg esi
popl_cfi %edx popl_cfi_reg edi
CFI_RESTORE edx popl_cfi_reg ebp
popl_cfi %esi popl_cfi_reg eax
CFI_RESTORE esi
popl_cfi %edi
CFI_RESTORE edi
popl_cfi %ebp
CFI_RESTORE ebp
popl_cfi %eax
CFI_RESTORE eax
.endm .endm
#endif /* CONFIG_X86_64 */ #endif /* CONFIG_X86_64 */
......
...@@ -86,11 +86,23 @@ ...@@ -86,11 +86,23 @@
CFI_ADJUST_CFA_OFFSET 8 CFI_ADJUST_CFA_OFFSET 8
.endm .endm
.macro pushq_cfi_reg reg
pushq %\reg
CFI_ADJUST_CFA_OFFSET 8
CFI_REL_OFFSET \reg, 0
.endm
.macro popq_cfi reg .macro popq_cfi reg
popq \reg popq \reg
CFI_ADJUST_CFA_OFFSET -8 CFI_ADJUST_CFA_OFFSET -8
.endm .endm
.macro popq_cfi_reg reg
popq %\reg
CFI_ADJUST_CFA_OFFSET -8
CFI_RESTORE \reg
.endm
.macro pushfq_cfi .macro pushfq_cfi
pushfq pushfq
CFI_ADJUST_CFA_OFFSET 8 CFI_ADJUST_CFA_OFFSET 8
...@@ -116,11 +128,23 @@ ...@@ -116,11 +128,23 @@
CFI_ADJUST_CFA_OFFSET 4 CFI_ADJUST_CFA_OFFSET 4
.endm .endm
.macro pushl_cfi_reg reg
pushl %\reg
CFI_ADJUST_CFA_OFFSET 4
CFI_REL_OFFSET \reg, 0
.endm
.macro popl_cfi reg .macro popl_cfi reg
popl \reg popl \reg
CFI_ADJUST_CFA_OFFSET -4 CFI_ADJUST_CFA_OFFSET -4
.endm .endm
.macro popl_cfi_reg reg
popl %\reg
CFI_ADJUST_CFA_OFFSET -4
CFI_RESTORE \reg
.endm
.macro pushfl_cfi .macro pushfl_cfi
pushfl pushfl
CFI_ADJUST_CFA_OFFSET 4 CFI_ADJUST_CFA_OFFSET 4
......
...@@ -1234,20 +1234,13 @@ error_code: ...@@ -1234,20 +1234,13 @@ error_code:
/*CFI_REL_OFFSET es, 0*/ /*CFI_REL_OFFSET es, 0*/
pushl_cfi %ds pushl_cfi %ds
/*CFI_REL_OFFSET ds, 0*/ /*CFI_REL_OFFSET ds, 0*/
pushl_cfi %eax pushl_cfi_reg eax
CFI_REL_OFFSET eax, 0 pushl_cfi_reg ebp
pushl_cfi %ebp pushl_cfi_reg edi
CFI_REL_OFFSET ebp, 0 pushl_cfi_reg esi
pushl_cfi %edi pushl_cfi_reg edx
CFI_REL_OFFSET edi, 0 pushl_cfi_reg ecx
pushl_cfi %esi pushl_cfi_reg ebx
CFI_REL_OFFSET esi, 0
pushl_cfi %edx
CFI_REL_OFFSET edx, 0
pushl_cfi %ecx
CFI_REL_OFFSET ecx, 0
pushl_cfi %ebx
CFI_REL_OFFSET ebx, 0
cld cld
movl $(__KERNEL_PERCPU), %ecx movl $(__KERNEL_PERCPU), %ecx
movl %ecx, %fs movl %ecx, %fs
......
...@@ -13,16 +13,6 @@ ...@@ -13,16 +13,6 @@
#include <asm/alternative-asm.h> #include <asm/alternative-asm.h>
#include <asm/dwarf2.h> #include <asm/dwarf2.h>
.macro SAVE reg
pushl_cfi %\reg
CFI_REL_OFFSET \reg, 0
.endm
.macro RESTORE reg
popl_cfi %\reg
CFI_RESTORE \reg
.endm
.macro read64 reg .macro read64 reg
movl %ebx, %eax movl %ebx, %eax
movl %ecx, %edx movl %ecx, %edx
...@@ -67,10 +57,10 @@ ENDPROC(atomic64_xchg_cx8) ...@@ -67,10 +57,10 @@ ENDPROC(atomic64_xchg_cx8)
.macro addsub_return func ins insc .macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8) ENTRY(atomic64_\func\()_return_cx8)
CFI_STARTPROC CFI_STARTPROC
SAVE ebp pushl_cfi_reg ebp
SAVE ebx pushl_cfi_reg ebx
SAVE esi pushl_cfi_reg esi
SAVE edi pushl_cfi_reg edi
movl %eax, %esi movl %eax, %esi
movl %edx, %edi movl %edx, %edi
...@@ -89,10 +79,10 @@ ENTRY(atomic64_\func\()_return_cx8) ...@@ -89,10 +79,10 @@ ENTRY(atomic64_\func\()_return_cx8)
10: 10:
movl %ebx, %eax movl %ebx, %eax
movl %ecx, %edx movl %ecx, %edx
RESTORE edi popl_cfi_reg edi
RESTORE esi popl_cfi_reg esi
RESTORE ebx popl_cfi_reg ebx
RESTORE ebp popl_cfi_reg ebp
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8) ENDPROC(atomic64_\func\()_return_cx8)
...@@ -104,7 +94,7 @@ addsub_return sub sub sbb ...@@ -104,7 +94,7 @@ addsub_return sub sub sbb
.macro incdec_return func ins insc .macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8) ENTRY(atomic64_\func\()_return_cx8)
CFI_STARTPROC CFI_STARTPROC
SAVE ebx pushl_cfi_reg ebx
read64 %esi read64 %esi
1: 1:
...@@ -119,7 +109,7 @@ ENTRY(atomic64_\func\()_return_cx8) ...@@ -119,7 +109,7 @@ ENTRY(atomic64_\func\()_return_cx8)
10: 10:
movl %ebx, %eax movl %ebx, %eax
movl %ecx, %edx movl %ecx, %edx
RESTORE ebx popl_cfi_reg ebx
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8) ENDPROC(atomic64_\func\()_return_cx8)
...@@ -130,7 +120,7 @@ incdec_return dec sub sbb ...@@ -130,7 +120,7 @@ incdec_return dec sub sbb
ENTRY(atomic64_dec_if_positive_cx8) ENTRY(atomic64_dec_if_positive_cx8)
CFI_STARTPROC CFI_STARTPROC
SAVE ebx pushl_cfi_reg ebx
read64 %esi read64 %esi
1: 1:
...@@ -146,18 +136,18 @@ ENTRY(atomic64_dec_if_positive_cx8) ...@@ -146,18 +136,18 @@ ENTRY(atomic64_dec_if_positive_cx8)
2: 2:
movl %ebx, %eax movl %ebx, %eax
movl %ecx, %edx movl %ecx, %edx
RESTORE ebx popl_cfi_reg ebx
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8) ENDPROC(atomic64_dec_if_positive_cx8)
ENTRY(atomic64_add_unless_cx8) ENTRY(atomic64_add_unless_cx8)
CFI_STARTPROC CFI_STARTPROC
SAVE ebp pushl_cfi_reg ebp
SAVE ebx pushl_cfi_reg ebx
/* these just push these two parameters on the stack */ /* these just push these two parameters on the stack */
SAVE edi pushl_cfi_reg edi
SAVE ecx pushl_cfi_reg ecx
movl %eax, %ebp movl %eax, %ebp
movl %edx, %edi movl %edx, %edi
...@@ -179,8 +169,8 @@ ENTRY(atomic64_add_unless_cx8) ...@@ -179,8 +169,8 @@ ENTRY(atomic64_add_unless_cx8)
3: 3:
addl $8, %esp addl $8, %esp
CFI_ADJUST_CFA_OFFSET -8 CFI_ADJUST_CFA_OFFSET -8
RESTORE ebx popl_cfi_reg ebx
RESTORE ebp popl_cfi_reg ebp
ret ret
4: 4:
cmpl %edx, 4(%esp) cmpl %edx, 4(%esp)
...@@ -192,7 +182,7 @@ ENDPROC(atomic64_add_unless_cx8) ...@@ -192,7 +182,7 @@ ENDPROC(atomic64_add_unless_cx8)
ENTRY(atomic64_inc_not_zero_cx8) ENTRY(atomic64_inc_not_zero_cx8)
CFI_STARTPROC CFI_STARTPROC
SAVE ebx pushl_cfi_reg ebx
read64 %esi read64 %esi
1: 1:
...@@ -209,7 +199,7 @@ ENTRY(atomic64_inc_not_zero_cx8) ...@@ -209,7 +199,7 @@ ENTRY(atomic64_inc_not_zero_cx8)
movl $1, %eax movl $1, %eax
3: 3:
RESTORE ebx popl_cfi_reg ebx
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8) ENDPROC(atomic64_inc_not_zero_cx8)
...@@ -51,10 +51,8 @@ unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum) ...@@ -51,10 +51,8 @@ unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum)
*/ */
ENTRY(csum_partial) ENTRY(csum_partial)
CFI_STARTPROC CFI_STARTPROC
pushl_cfi %esi pushl_cfi_reg esi
CFI_REL_OFFSET esi, 0 pushl_cfi_reg ebx
pushl_cfi %ebx
CFI_REL_OFFSET ebx, 0
movl 20(%esp),%eax # Function arg: unsigned int sum movl 20(%esp),%eax # Function arg: unsigned int sum
movl 16(%esp),%ecx # Function arg: int len movl 16(%esp),%ecx # Function arg: int len
movl 12(%esp),%esi # Function arg: unsigned char *buff movl 12(%esp),%esi # Function arg: unsigned char *buff
...@@ -131,10 +129,8 @@ ENTRY(csum_partial) ...@@ -131,10 +129,8 @@ ENTRY(csum_partial)
jz 8f jz 8f
roll $8, %eax roll $8, %eax
8: 8:
popl_cfi %ebx popl_cfi_reg ebx
CFI_RESTORE ebx popl_cfi_reg esi
popl_cfi %esi
CFI_RESTORE esi
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(csum_partial) ENDPROC(csum_partial)
...@@ -145,10 +141,8 @@ ENDPROC(csum_partial) ...@@ -145,10 +141,8 @@ ENDPROC(csum_partial)
ENTRY(csum_partial) ENTRY(csum_partial)
CFI_STARTPROC CFI_STARTPROC
pushl_cfi %esi pushl_cfi_reg esi
CFI_REL_OFFSET esi, 0 pushl_cfi_reg ebx
pushl_cfi %ebx
CFI_REL_OFFSET ebx, 0
movl 20(%esp),%eax # Function arg: unsigned int sum movl 20(%esp),%eax # Function arg: unsigned int sum
movl 16(%esp),%ecx # Function arg: int len movl 16(%esp),%ecx # Function arg: int len
movl 12(%esp),%esi # Function arg: const unsigned char *buf movl 12(%esp),%esi # Function arg: const unsigned char *buf
...@@ -255,10 +249,8 @@ ENTRY(csum_partial) ...@@ -255,10 +249,8 @@ ENTRY(csum_partial)
jz 90f jz 90f
roll $8, %eax roll $8, %eax
90: 90:
popl_cfi %ebx popl_cfi_reg ebx
CFI_RESTORE ebx popl_cfi_reg esi
popl_cfi %esi
CFI_RESTORE esi
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(csum_partial) ENDPROC(csum_partial)
...@@ -298,12 +290,9 @@ ENTRY(csum_partial_copy_generic) ...@@ -298,12 +290,9 @@ ENTRY(csum_partial_copy_generic)
CFI_STARTPROC CFI_STARTPROC
subl $4,%esp subl $4,%esp
CFI_ADJUST_CFA_OFFSET 4 CFI_ADJUST_CFA_OFFSET 4
pushl_cfi %edi pushl_cfi_reg edi
CFI_REL_OFFSET edi, 0 pushl_cfi_reg esi
pushl_cfi %esi pushl_cfi_reg ebx
CFI_REL_OFFSET esi, 0
pushl_cfi %ebx
CFI_REL_OFFSET ebx, 0
movl ARGBASE+16(%esp),%eax # sum movl ARGBASE+16(%esp),%eax # sum
movl ARGBASE+12(%esp),%ecx # len movl ARGBASE+12(%esp),%ecx # len
movl ARGBASE+4(%esp),%esi # src movl ARGBASE+4(%esp),%esi # src
...@@ -412,12 +401,9 @@ DST( movb %cl, (%edi) ) ...@@ -412,12 +401,9 @@ DST( movb %cl, (%edi) )
.previous .previous
popl_cfi %ebx popl_cfi_reg ebx
CFI_RESTORE ebx popl_cfi_reg esi
popl_cfi %esi popl_cfi_reg edi
CFI_RESTORE esi
popl_cfi %edi
CFI_RESTORE edi
popl_cfi %ecx # equivalent to addl $4,%esp popl_cfi %ecx # equivalent to addl $4,%esp
ret ret
CFI_ENDPROC CFI_ENDPROC
...@@ -441,12 +427,9 @@ ENDPROC(csum_partial_copy_generic) ...@@ -441,12 +427,9 @@ ENDPROC(csum_partial_copy_generic)
ENTRY(csum_partial_copy_generic) ENTRY(csum_partial_copy_generic)
CFI_STARTPROC CFI_STARTPROC
pushl_cfi %ebx pushl_cfi_reg ebx
CFI_REL_OFFSET ebx, 0 pushl_cfi_reg edi
pushl_cfi %edi pushl_cfi_reg esi
CFI_REL_OFFSET edi, 0
pushl_cfi %esi
CFI_REL_OFFSET esi, 0
movl ARGBASE+4(%esp),%esi #src movl ARGBASE+4(%esp),%esi #src
movl ARGBASE+8(%esp),%edi #dst movl ARGBASE+8(%esp),%edi #dst
movl ARGBASE+12(%esp),%ecx #len movl ARGBASE+12(%esp),%ecx #len
...@@ -506,12 +489,9 @@ DST( movb %dl, (%edi) ) ...@@ -506,12 +489,9 @@ DST( movb %dl, (%edi) )
jmp 7b jmp 7b
.previous .previous
popl_cfi %esi popl_cfi_reg esi
CFI_RESTORE esi popl_cfi_reg edi
popl_cfi %edi popl_cfi_reg ebx
CFI_RESTORE edi
popl_cfi %ebx
CFI_RESTORE ebx
ret ret
CFI_ENDPROC CFI_ENDPROC
ENDPROC(csum_partial_copy_generic) ENDPROC(csum_partial_copy_generic)
......
...@@ -14,8 +14,8 @@ ...@@ -14,8 +14,8 @@
.macro op_safe_regs op .macro op_safe_regs op
ENTRY(\op\()_safe_regs) ENTRY(\op\()_safe_regs)
CFI_STARTPROC CFI_STARTPROC
pushq_cfi %rbx pushq_cfi_reg rbx
pushq_cfi %rbp pushq_cfi_reg rbp
movq %rdi, %r10 /* Save pointer */ movq %rdi, %r10 /* Save pointer */
xorl %r11d, %r11d /* Return value */ xorl %r11d, %r11d /* Return value */
movl (%rdi), %eax movl (%rdi), %eax
...@@ -35,8 +35,8 @@ ENTRY(\op\()_safe_regs) ...@@ -35,8 +35,8 @@ ENTRY(\op\()_safe_regs)
movl %ebp, 20(%r10) movl %ebp, 20(%r10)
movl %esi, 24(%r10) movl %esi, 24(%r10)
movl %edi, 28(%r10) movl %edi, 28(%r10)
popq_cfi %rbp popq_cfi_reg rbp
popq_cfi %rbx popq_cfi_reg rbx
ret ret
3: 3:
CFI_RESTORE_STATE CFI_RESTORE_STATE
...@@ -53,10 +53,10 @@ ENDPROC(\op\()_safe_regs) ...@@ -53,10 +53,10 @@ ENDPROC(\op\()_safe_regs)
.macro op_safe_regs op .macro op_safe_regs op
ENTRY(\op\()_safe_regs) ENTRY(\op\()_safe_regs)
CFI_STARTPROC CFI_STARTPROC
pushl_cfi %ebx pushl_cfi_reg ebx
pushl_cfi %ebp pushl_cfi_reg ebp
pushl_cfi %esi pushl_cfi_reg esi
pushl_cfi %edi pushl_cfi_reg edi
pushl_cfi $0 /* Return value */ pushl_cfi $0 /* Return value */
pushl_cfi %eax pushl_cfi %eax
movl 4(%eax), %ecx movl 4(%eax), %ecx
...@@ -80,10 +80,10 @@ ENTRY(\op\()_safe_regs) ...@@ -80,10 +80,10 @@ ENTRY(\op\()_safe_regs)
movl %esi, 24(%eax) movl %esi, 24(%eax)
movl %edi, 28(%eax) movl %edi, 28(%eax)
popl_cfi %eax popl_cfi %eax
popl_cfi %edi popl_cfi_reg edi
popl_cfi %esi popl_cfi_reg esi
popl_cfi %ebp popl_cfi_reg ebp
popl_cfi %ebx popl_cfi_reg ebx
ret ret
3: 3:
CFI_RESTORE_STATE CFI_RESTORE_STATE
......
...@@ -34,10 +34,10 @@ ...@@ -34,10 +34,10 @@
*/ */
#define save_common_regs \ #define save_common_regs \
pushl_cfi %ecx; CFI_REL_OFFSET ecx, 0 pushl_cfi_reg ecx
#define restore_common_regs \ #define restore_common_regs \
popl_cfi %ecx; CFI_RESTORE ecx popl_cfi_reg ecx
/* Avoid uglifying the argument copying x86-64 needs to do. */ /* Avoid uglifying the argument copying x86-64 needs to do. */
.macro movq src, dst .macro movq src, dst
...@@ -64,22 +64,22 @@ ...@@ -64,22 +64,22 @@
*/ */
#define save_common_regs \ #define save_common_regs \
pushq_cfi %rdi; CFI_REL_OFFSET rdi, 0; \ pushq_cfi_reg rdi; \
pushq_cfi %rsi; CFI_REL_OFFSET rsi, 0; \ pushq_cfi_reg rsi; \
pushq_cfi %rcx; CFI_REL_OFFSET rcx, 0; \ pushq_cfi_reg rcx; \
pushq_cfi %r8; CFI_REL_OFFSET r8, 0; \ pushq_cfi_reg r8; \
pushq_cfi %r9; CFI_REL_OFFSET r9, 0; \ pushq_cfi_reg r9; \
pushq_cfi %r10; CFI_REL_OFFSET r10, 0; \ pushq_cfi_reg r10; \
pushq_cfi %r11; CFI_REL_OFFSET r11, 0 pushq_cfi_reg r11
#define restore_common_regs \ #define restore_common_regs \
popq_cfi %r11; CFI_RESTORE r11; \ popq_cfi_reg r11; \
popq_cfi %r10; CFI_RESTORE r10; \ popq_cfi_reg r10; \
popq_cfi %r9; CFI_RESTORE r9; \ popq_cfi_reg r9; \
popq_cfi %r8; CFI_RESTORE r8; \ popq_cfi_reg r8; \
popq_cfi %rcx; CFI_RESTORE rcx; \ popq_cfi_reg rcx; \
popq_cfi %rsi; CFI_RESTORE rsi; \ popq_cfi_reg rsi; \
popq_cfi %rdi; CFI_RESTORE rdi popq_cfi_reg rdi
#endif #endif
...@@ -87,12 +87,10 @@ ...@@ -87,12 +87,10 @@
ENTRY(call_rwsem_down_read_failed) ENTRY(call_rwsem_down_read_failed)
CFI_STARTPROC CFI_STARTPROC
save_common_regs save_common_regs
__ASM_SIZE(push,_cfi) %__ASM_REG(dx) __ASM_SIZE(push,_cfi_reg) __ASM_REG(dx)
CFI_REL_OFFSET __ASM_REG(dx), 0
movq %rax,%rdi movq %rax,%rdi
call rwsem_down_read_failed call rwsem_down_read_failed
__ASM_SIZE(pop,_cfi) %__ASM_REG(dx) __ASM_SIZE(pop,_cfi_reg) __ASM_REG(dx)
CFI_RESTORE __ASM_REG(dx)
restore_common_regs restore_common_regs
ret ret
CFI_ENDPROC CFI_ENDPROC
...@@ -124,12 +122,10 @@ ENDPROC(call_rwsem_wake) ...@@ -124,12 +122,10 @@ ENDPROC(call_rwsem_wake)
ENTRY(call_rwsem_downgrade_wake) ENTRY(call_rwsem_downgrade_wake)
CFI_STARTPROC CFI_STARTPROC
save_common_regs save_common_regs
__ASM_SIZE(push,_cfi) %__ASM_REG(dx) __ASM_SIZE(push,_cfi_reg) __ASM_REG(dx)
CFI_REL_OFFSET __ASM_REG(dx), 0
movq %rax,%rdi movq %rax,%rdi
call rwsem_downgrade_wake call rwsem_downgrade_wake
__ASM_SIZE(pop,_cfi) %__ASM_REG(dx) __ASM_SIZE(pop,_cfi_reg) __ASM_REG(dx)
CFI_RESTORE __ASM_REG(dx)
restore_common_regs restore_common_regs
ret ret
CFI_ENDPROC CFI_ENDPROC
......
...@@ -13,12 +13,9 @@ ...@@ -13,12 +13,9 @@
.globl \name .globl \name
\name: \name:
CFI_STARTPROC CFI_STARTPROC
pushl_cfi %eax pushl_cfi_reg eax
CFI_REL_OFFSET eax, 0 pushl_cfi_reg ecx
pushl_cfi %ecx pushl_cfi_reg edx
CFI_REL_OFFSET ecx, 0
pushl_cfi %edx
CFI_REL_OFFSET edx, 0
.if \put_ret_addr_in_eax .if \put_ret_addr_in_eax
/* Place EIP in the arg1 */ /* Place EIP in the arg1 */
...@@ -26,12 +23,9 @@ ...@@ -26,12 +23,9 @@
.endif .endif
call \func call \func
popl_cfi %edx popl_cfi_reg edx
CFI_RESTORE edx popl_cfi_reg ecx
popl_cfi %ecx popl_cfi_reg eax
CFI_RESTORE ecx
popl_cfi %eax
CFI_RESTORE eax
ret ret
CFI_ENDPROC CFI_ENDPROC
_ASM_NOKPROBE(\name) _ASM_NOKPROBE(\name)
......
...@@ -17,24 +17,15 @@ ...@@ -17,24 +17,15 @@
CFI_STARTPROC CFI_STARTPROC
/* this one pushes 9 elems, the next one would be %rIP */ /* this one pushes 9 elems, the next one would be %rIP */
pushq_cfi %rdi pushq_cfi_reg rdi
CFI_REL_OFFSET rdi, 0 pushq_cfi_reg rsi
pushq_cfi %rsi pushq_cfi_reg rdx
CFI_REL_OFFSET rsi, 0 pushq_cfi_reg rcx
pushq_cfi %rdx pushq_cfi_reg rax
CFI_REL_OFFSET rdx, 0 pushq_cfi_reg r8
pushq_cfi %rcx pushq_cfi_reg r9
CFI_REL_OFFSET rcx, 0 pushq_cfi_reg r10
pushq_cfi %rax pushq_cfi_reg r11
CFI_REL_OFFSET rax, 0
pushq_cfi %r8
CFI_REL_OFFSET r8, 0
pushq_cfi %r9
CFI_REL_OFFSET r9, 0
pushq_cfi %r10
CFI_REL_OFFSET r10, 0
pushq_cfi %r11
CFI_REL_OFFSET r11, 0
.if \put_ret_addr_in_rdi .if \put_ret_addr_in_rdi
/* 9*8(%rsp) is return addr on stack */ /* 9*8(%rsp) is return addr on stack */
...@@ -69,24 +60,15 @@ ...@@ -69,24 +60,15 @@
CFI_STARTPROC CFI_STARTPROC
CFI_ADJUST_CFA_OFFSET 9*8 CFI_ADJUST_CFA_OFFSET 9*8
restore: restore:
popq_cfi %r11 popq_cfi_reg r11
CFI_RESTORE r11 popq_cfi_reg r10
popq_cfi %r10 popq_cfi_reg r9
CFI_RESTORE r10 popq_cfi_reg r8
popq_cfi %r9 popq_cfi_reg rax
CFI_RESTORE r9 popq_cfi_reg rcx
popq_cfi %r8 popq_cfi_reg rdx
CFI_RESTORE r8 popq_cfi_reg rsi
popq_cfi %rax popq_cfi_reg rdi
CFI_RESTORE rax
popq_cfi %rcx
CFI_RESTORE rcx
popq_cfi %rdx
CFI_RESTORE rdx
popq_cfi %rsi
CFI_RESTORE rsi
popq_cfi %rdi
CFI_RESTORE rdi
ret ret
CFI_ENDPROC CFI_ENDPROC
_ASM_NOKPROBE(restore) _ASM_NOKPROBE(restore)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册