提交 3f0116c3 编写于 作者: Ingo Molnar

compiler/gcc4: Add quirk for 'asm goto' miscompilation bug

Fengguang Wu, Oleg Nesterov and Peter Zijlstra tracked down
a kernel crash to a GCC bug: GCC miscompiles certain 'asm goto'
constructs, as outlined here:

  http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58670

Implement a workaround suggested by Jakub Jelinek.
Reported-and-tested-by: Fengguang Wu <fengguang.wu@intel.com>
Reported-by: Oleg Nesterov <oleg@redhat.com>
Reported-by: Peter Zijlstra <a.p.zijlstra@chello.nl>
Suggested-by: Jakub Jelinek <jakub@redhat.com>
Reviewed-by: Richard Henderson <rth@twiddle.net>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: <stable@kernel.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
上级 2fe80d3b
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
JUMP_LABEL_NOP "\n\t" JUMP_LABEL_NOP "\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
".word 1b, %l[l_yes], %c0\n\t" ".word 1b, %l[l_yes], %c0\n\t"
......
...@@ -22,7 +22,7 @@ ...@@ -22,7 +22,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\tnop\n\t" asm_volatile_goto("1:\tnop\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
WORD_INSN " 1b, %l[l_yes], %0\n\t" WORD_INSN " 1b, %l[l_yes], %0\n\t"
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
JUMP_ENTRY_TYPE "1b, %l[l_yes], %c0\n\t" JUMP_ENTRY_TYPE "1b, %l[l_yes], %c0\n\t"
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("0: brcl 0,0\n" asm_volatile_goto("0: brcl 0,0\n"
".pushsection __jump_table, \"aw\"\n" ".pushsection __jump_table, \"aw\"\n"
ASM_ALIGN "\n" ASM_ALIGN "\n"
ASM_PTR " 0b, %l[label], %0\n" ASM_PTR " 0b, %l[label], %0\n"
......
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:\n\t" asm_volatile_goto("1:\n\t"
"nop\n\t" "nop\n\t"
"nop\n\t" "nop\n\t"
".pushsection __jump_table, \"aw\"\n\t" ".pushsection __jump_table, \"aw\"\n\t"
......
...@@ -374,7 +374,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit) ...@@ -374,7 +374,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
* Catch too early usage of this before alternatives * Catch too early usage of this before alternatives
* have run. * have run.
*/ */
asm goto("1: jmp %l[t_warn]\n" asm_volatile_goto("1: jmp %l[t_warn]\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" " .long 1b - .\n"
...@@ -388,7 +388,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit) ...@@ -388,7 +388,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
#endif #endif
asm goto("1: jmp %l[t_no]\n" asm_volatile_goto("1: jmp %l[t_no]\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" " .long 1b - .\n"
...@@ -453,7 +453,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit) ...@@ -453,7 +453,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
* have. Thus, we force the jump to the widest, 4-byte, signed relative * have. Thus, we force the jump to the widest, 4-byte, signed relative
* offset even though the last would often fit in less bytes. * offset even though the last would often fit in less bytes.
*/ */
asm goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n" asm_volatile_goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n"
"2:\n" "2:\n"
".section .altinstructions,\"a\"\n" ".section .altinstructions,\"a\"\n"
" .long 1b - .\n" /* src offset */ " .long 1b - .\n" /* src offset */
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
static __always_inline bool arch_static_branch(struct static_key *key) static __always_inline bool arch_static_branch(struct static_key *key)
{ {
asm goto("1:" asm_volatile_goto("1:"
".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t" ".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t"
".pushsection __jump_table, \"aw\" \n\t" ".pushsection __jump_table, \"aw\" \n\t"
_ASM_ALIGN "\n\t" _ASM_ALIGN "\n\t"
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
static inline void __mutex_fastpath_lock(atomic_t *v, static inline void __mutex_fastpath_lock(atomic_t *v,
void (*fail_fn)(atomic_t *)) void (*fail_fn)(atomic_t *))
{ {
asm volatile goto(LOCK_PREFIX " decl %0\n" asm_volatile_goto(LOCK_PREFIX " decl %0\n"
" jns %l[exit]\n" " jns %l[exit]\n"
: : "m" (v->counter) : : "m" (v->counter)
: "memory", "cc" : "memory", "cc"
...@@ -75,7 +75,7 @@ static inline int __mutex_fastpath_lock_retval(atomic_t *count) ...@@ -75,7 +75,7 @@ static inline int __mutex_fastpath_lock_retval(atomic_t *count)
static inline void __mutex_fastpath_unlock(atomic_t *v, static inline void __mutex_fastpath_unlock(atomic_t *v,
void (*fail_fn)(atomic_t *)) void (*fail_fn)(atomic_t *))
{ {
asm volatile goto(LOCK_PREFIX " incl %0\n" asm_volatile_goto(LOCK_PREFIX " incl %0\n"
" jg %l[exit]\n" " jg %l[exit]\n"
: : "m" (v->counter) : : "m" (v->counter)
: "memory", "cc" : "memory", "cc"
......
...@@ -65,6 +65,21 @@ ...@@ -65,6 +65,21 @@
#define __visible __attribute__((externally_visible)) #define __visible __attribute__((externally_visible))
#endif #endif
/*
 * GCC 'asm goto' miscompiles certain code sequences:
 *
 * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58670
 *
 * Work it around via a compiler barrier quirk suggested by Jakub Jelinek.
 * Fixed in GCC 4.8.2 and later versions.
 *
 * (asm goto is automatically volatile - the naming reflects this.)
 */
/*
 * On affected compilers (<= 4.8.1) the empty asm("") after the asm goto
 * acts as an optimization barrier that prevents the miscompilation; on
 * fixed compilers the macro expands to a plain asm goto.
 */
#if GCC_VERSION <= 40801
# define asm_volatile_goto(x...) do { asm goto(x); asm (""); } while (0)
#else
# define asm_volatile_goto(x...) do { asm goto(x); } while (0)
#endif
#ifdef CONFIG_ARCH_USE_BUILTIN_BSWAP #ifdef CONFIG_ARCH_USE_BUILTIN_BSWAP
#if GCC_VERSION >= 40400 #if GCC_VERSION >= 40400
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册